Needle Engine

Changes between version 3.47.0-beta.3 and 3.47.1-beta
Files changed (8)
  1. src/engine-components/Animation.ts +5 -0
  2. src/engine/engine_application.ts +3 -2
  3. src/engine/engine_networking_streams.ts +264 -58
  4. src/engine/webcomponents/icons.ts +2 -1
  5. src/engine/webcomponents/needle menu/needle-menu.ts +34 -6
  6. src/engine-components/SpriteRenderer.ts +1 -1
  7. src/engine-components/VideoPlayer.ts +88 -4
  8. src/engine-components/Voip.ts +137 -42
src/engine-components/Animation.ts CHANGED
@@ -433,6 +433,10 @@
  const clip = action.getClip();
  action.time = Mathf.lerp(options.minMaxOffsetNormalized.x, options.minMaxOffsetNormalized.y, Math.random()) * clip.duration;
  }
+ // If the animation is not running and the time is at the end, reset the time
+ else if(!action.isRunning() && action.time >= action.getClip().duration) {
+ action.time = 0;
+ }

  // Apply speed
  if (options?.minMaxSpeed) {
@@ -451,6 +455,7 @@
  action.clampWhenFinished = true;
  }

+
  action.paused = false;
  action.play();

src/engine/engine_application.ts CHANGED
@@ -18,10 +18,11 @@
  userInteractionCallbacks.length = 0;
  copy.forEach(cb => cb());
  }
- document.addEventListener('pointerdown', onUserInteraction);
+ document.addEventListener('mousedown', onUserInteraction);
+ document.addEventListener('pointerup', onUserInteraction);
  document.addEventListener('click', onUserInteraction);
  document.addEventListener('dragstart', onUserInteraction);
- document.addEventListener('touchstart', onUserInteraction);
+ document.addEventListener('touchend', onUserInteraction);
  document.addEventListener('keydown', onUserInteraction);
  NeedleXRSession.onXRSessionStart(() => {
  onUserInteraction();
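
These listeners drive the engine's "first user interaction" callbacks: work that needs a user gesture (starting audio or video, answering a call) is queued until one of them fires. A minimal sketch of how that queue is consumed, using the `Application.registerWaitForInteraction` helper that Voip.ts calls later in this changeset; the audio file name is a placeholder:

```typescript
import { Application } from "./engine_application.js";

// Queued work is flushed by the mousedown/pointerup/click/touchend/keydown
// listeners registered above (or when an XR session starts).
Application.registerWaitForInteraction(() => {
    const audio = new Audio("placeholder-notification.mp3"); // placeholder asset
    audio.play().catch(err => console.warn("Playback still blocked:", err));
});
```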
src/engine/engine_networking_streams.ts CHANGED
@@ -1,9 +1,11 @@
- import Peer, { MediaConnection } from "peerjs"
+ import Peer, { CallOption, MediaConnection } from "peerjs"
  import { EventDispatcher } from "three";

  import { RoomEvents } from "../engine/engine_networking.js";
  import { UserJoinedOrLeftRoomModel } from "../engine/engine_networking.js";
  import { getPeerjsInstance } from "../engine/engine_networking_peer.js";
+ import { showBalloonMessage } from "./debug/index.js";
+ import { Application } from "./engine_application.js";
  import { type Context } from "./engine_context.js";
  import type { IModel } from "./engine_networking_types.js";
  import { type IComponent } from "./engine_types.js";
@@ -32,9 +34,12 @@
  }
  export class StreamReceivedEvent {
  readonly type = NetworkedStreamEvents.StreamReceived;
+ readonly userId: string;
  readonly stream: MediaStream;
  readonly target: CallHandle;
- constructor(stream: MediaStream, target: CallHandle) {
+
+ constructor(userId: string, stream: MediaStream, target: CallHandle) {
+ this.userId = userId;
  this.stream = stream
  this.target = target;
  }
@@ -58,6 +63,7 @@
  }

  class CallHandle extends EventDispatcher<any> {
+ readonly peerId: string;
  readonly userId: string;
  readonly direction: CallDirection;
  readonly call: MediaConnection;
@@ -82,21 +88,22 @@
  }

  get isClosed() {
- return !this.isOpen;
+ return !this.isOpen || this._isDisposed;
  }

- constructor(userId: string, call: MediaConnection, direction: CallDirection) {
+ constructor(userId: string, call: MediaConnection, direction: CallDirection, stream: MediaStream | null = null) {
  super();
+ this.peerId = call.peer;
  this.userId = userId;
  this.call = call;
  this.direction = direction;
- this._stream = null;
+ this._stream = stream;
  call.on("stream", stream => {
  if (debug)
- console.log("Receive video", stream.getAudioTracks(), stream.getVideoTracks());
+ console.log("Receive stream", "\nAudio:", stream.getAudioTracks(), "\nVideo:", stream.getVideoTracks());
  this._stream = stream;
  if (direction === CallDirection.Incoming) {
- const args: StreamReceivedEvent = new StreamReceivedEvent(stream, this);
+ const args: StreamReceivedEvent = new StreamReceivedEvent(userId, stream, this);
  this.dispatchEvent(args);
  }
  });
@@ -106,6 +113,11 @@
  }
  }

+ function applySdpTransform(sdp){
+ sdp = sdp.replace("a=fmtp:111 minptime=10;useinbandfec=1","a=fmtp:111 ptime=5;useinbandfec=1;stereo=1;maxplaybackrate=48000;maxaveragebitrat=128000;sprop-stereo=1");
+ return sdp;
+ }
+
  export class PeerHandle extends EventDispatcher<any> {

  private static readonly instances: Map<string, PeerHandle> = new Map();
@@ -138,20 +150,87 @@
  }

  makeCall(peerId: string, stream: MediaStream): CallHandle | undefined {
- const opts = { metadata: { userId: this.context.connection.connectionId } };
+ if (!stream?.id) {
+ if (debug) console.warn("Can not make a call: mediastream has no id or is undefined");
+ else console.debug("Can not make a call: mediastream has no id or is undefined");
+ return undefined;
+ }
+
+ const opts: CallOption = {
+ metadata: {
+ userId: this.context.connection.connectionId,
+ streamId: stream.id
+ },
+ sdpTransform: sdp => {
+ return applySdpTransform(sdp);
+ },
+ };
  const call = this._peer?.call(peerId, stream, opts);
- if (call)
- return this.registerCall(call, CallDirection.Outgoing);
+ if (call) {
+ const res = this.registerCall(call, CallDirection.Outgoing, stream);
+ if (debug) console.warn(`📞 CALL ${peerId}`, "\nOutgoing:", this._outgoingCalls, "\nIncoming:", this._incomingCalls)
+ return res;
+ }
+ else if (debug) {
+ console.error("Failed to make call", peerId, stream, this._peer);
+ }
  return undefined;
  }

+ closeAll() {
+ for (const call of this._incomingCalls) {
+ call.close();
+ }
+ for (const call of this._outgoingCalls) {
+ call.close();
+ }
+ this.updateCalls();
+ }
+
+ updateCalls = () => {
+ for (let i = this._incomingCalls.length - 1; i >= 0; i--) {
+ const call = this._incomingCalls[i];
+ if (call.isClosed && !call.isOpening) {
+ this._incomingCalls.splice(i, 1);
+ }
+ }
+ for (let i = this._outgoingCalls.length - 1; i >= 0; i--) {
+ const call = this._outgoingCalls[i];
+ let shouldRemove = false;
+ if (call.isClosed && !call.isOpening) {
+ if (call.stream?.active) {
+ // don't remove the call if the stream is still active
+ if (debug) console.warn("!!! Stream is still active, don't remove call", call.userId, "Your id: " + this.context.connection.connectionId);
+ }
+ else {
+ if (debug) console.warn("!!! Remove closed call", call.userId);
+ shouldRemove = true;
+ }
+ }
+ // check if the user is still in the room
+ if (this.context.connection.userIsInRoom(call.userId) === false) {
+ if (debug) console.warn("!!! User is not in room anymore, remove call", call.userId);
+ shouldRemove = true;
+ }
+
+ if (shouldRemove) {
+ call.close();
+ this._outgoingCalls.splice(i, 1);
+ }
+ }
+ }
+
  get peer(): Peer | undefined { return this._peer; }

+ get incomingCalls() {
+ return this._incomingCalls;
+ }
+
  readonly id: string;
  readonly context: Context;
+ private readonly _incomingCalls: CallHandle[] = [];
+ private readonly _outgoingCalls: CallHandle[] = [];
  private _peer: Peer | undefined;
- private _incomingCalls: CallHandle[] = [];
- private _outgoingCalls: CallHandle[] = [];

  private constructor(context: Context, id: string) {
  super();
@@ -168,8 +247,8 @@
  private _enabledPeer: boolean = false;
  private onConnectRoomFn: Function = this.onConnectRoom.bind(this);
  // private onUserJoinedOrLeftRoomFn: Function = this.onUserJoinedOrLeftRoom.bind(this);
- private onPeerConnectFn: (id) => void = this.onPeerConnect.bind(this);
- private onPeerReceiveCallFn: (call) => void = this.onPeerReceivingCall.bind(this);
+ // private onPeerConnectFn: (id) => void = this.onPeerConnect.bind(this);
+ // private onPeerReceiveCallFn: (call) => void = this.onPeerReceivingCall.bind(this);
  // private _connectionPeerIdMap : Map<string, string> = new Map();

  enable() {
@@ -213,54 +292,106 @@

  private subscribePeerEvents() {
  if (!this._peer) return;
- this._peer.on("open", this.onPeerConnectFn);
- this._peer.on("call", this.onPeerReceiveCallFn);
+ this._peer.on("open", this.onPeerConnect);
+ this._peer.on("close", this.onPeerClose);
+ this._peer.on("call", this.onPeerReceivingCall);
+ this._peer.on("disconnected", this.onPeerDisconnected);
+ this._peer.on("error", this.onPeerError);
  // this.context.connection.beginListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
  // TODO: make connection to all current active calls even if the user is not anymore in the needle room
  }

  private unsubscribePeerEvents() {
  if (!this._peer) return;
- this._peer.off("open", this.onPeerConnectFn);
- this._peer.off("call", this.onPeerReceiveCallFn);
+ this._peer.off("open", this.onPeerConnect);
+ this._peer.off("close", this.onPeerClose);
+ this._peer.off("call", this.onPeerReceivingCall);
+ this._peer.off("disconnected", this.onPeerDisconnected);
+ this._peer.off("error", this.onPeerError);
  // this.context.connection.stopListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
  }

- private onPeerConnect(id): void {
- if (debug)
- console.log("Peer connected as", id);
+ /**
+ * Emitted when a connection to the PeerServer is established. You may use the peer before this is emitted, but messages to the server will be queued. id is the brokering ID of the peer (which was either provided in the constructor or assigned by the server).
+ * @param id ID of the peer
+ */
+ private onPeerConnect = (id) => {
+ if (debug) console.log("PEER opened as", id);
+ if (id === null) {
+ console.error("Peer connection failed", id);
+ return;
+ }
  this.context.connection.send(NetworkedStreamEvents.Connected, new PeerUserConnectedModel(this, id));
  }

- private onPeerReceivingCall(call: MediaConnection): void {
- call.answer();
- this.registerCall(call, CallDirection.Incoming);
+ /** Emitted when the peer is destroyed and can no longer accept or create any new connections. At this time, the peer's connections will all be closed. */
+ private onPeerClose = () => {
+ if (debug) console.log("PEER closed");
+ this.updateCalls();
  }

- private registerCall(call: MediaConnection, direction: CallDirection): CallHandle {
+ /** Emitted when the peer is disconnected from the signalling server, either manually or because the connection to the signalling server was lost. */
+ private onPeerDisconnected = () => {
+ if (debug) console.log("PEER disconnected");
+ this.updateCalls();
+ }

+ /**
+ * Errors on the peer are almost always fatal and will destroy the peer. Errors from the underlying socket and PeerConnections are forwarded here.
+ */
+ private onPeerError = (err: Error) => {
+ if (debug) console.error("PEER error", err);
+ }
+
+ private onPeerReceivingCall = (call: MediaConnection) => {
+
+ call.answer(undefined, {
+ sdpTransform: sdp => {
+ return applySdpTransform(sdp);
+ },
+ });
+ this.registerCall(call, CallDirection.Incoming, null);
+
+ // if (call.type != "media") {
+ // call.answer();
+ // this.registerCall(call, CallDirection.Incoming, null);
+ // }
+ // else {
+ // if (!Application.userInteractionRegistered) {
+ // showBalloonMessage("You have an incoming call. Please click on the screen to answer it.");
+ // }
+ // Application.registerWaitForInteraction(() => {
+ // })
+ // }
+ }
+
+ private registerCall(call: MediaConnection, direction: CallDirection, stream: MediaStream | null): CallHandle {
+
  const meta = call.metadata;
  if (!meta || !meta.userId) {
  console.error("Missing call metadata", call);
  }
+
  const userId = meta.userId;

- if (direction === CallDirection.Incoming && debug) console.log("Receive call from", call.metadata);
- else if (debug) console.log("Make call to", call.metadata);
+ if (direction === CallDirection.Incoming && debug) console.warn("Receive call from", call.metadata, call.connectionId);
+ else if (debug) {
+ console.warn("→ Make call to", call.metadata);
+ }
+ const calls = direction === CallDirection.Incoming ? this._incomingCalls : this._outgoingCalls;

- const arr = direction === CallDirection.Incoming ? this._incomingCalls : this._outgoingCalls;
- const handle = new CallHandle(userId, call, direction);
- arr.push(handle);
+ const handle = new CallHandle(userId, call, direction, stream);
+ calls.push(handle);
+
  call.on("error", err => {
  console.error("Call error", err);
  });
  call.on("close", () => {
- if (debug)
- console.log("Call ended", call.metadata);
- call.close();
- const index = arr.indexOf(handle);
- if (index !== -1)
- arr.splice(index, 1);
+ if (debug) console.log("Call ended", call.metadata);
+ const index = calls.indexOf(handle);
+ if (index !== -1) calls.splice(index, 1);
+ handle.close();
+ this.dispatchEvent(new StreamEndedEvent(userId, direction));
  });

  handle.addEventListener(NetworkedStreamEvents.StreamEnded, e => {
@@ -274,11 +405,13 @@
  });

  call.on("stream", () => {
+ if (debug) console.log("Received stream for call", call.metadata);
  // workaround for https://github.com/peers/peerjs/issues/636
  let intervalCounter = 0;
  const closeInterval = setInterval(() => {
  const isFirstInterval = intervalCounter === 0;
  if (!handle.isOpen && isFirstInterval) {
+ if (debug) console.warn("Close call because stream is not active", call.metadata);
  intervalCounter += 1;
  clearInterval(closeInterval);
  handle.close();
@@ -306,10 +439,16 @@
  // userId: string;
  // }

+ /**
+ * This class is responsible for managing the sending and receiving of streams between peers.
+ */
  export class NetworkedStreams extends EventDispatcher<any> {

- static create(comp: IComponent) {
- const peer = PeerHandle.getOrCreate(comp.context, comp.context.connection.connectionId!);
+ /**
+ * Create a new NetworkedStreams instance
+ */
+ static create(comp: IComponent, guid?: string) {
+ const peer = PeerHandle.getOrCreate(comp.context, guid || comp.context.connection.connectionId || comp.guid);
  return new NetworkedStreams(comp.context, peer);
  }

@@ -319,6 +458,9 @@
  // private _receiveVideoStreamListeners: Array<(info: IncomingStreamArgs) => void> = [];
  private _sendingStreams: Map<MediaStream, CallHandle[]> = new Map();

+ /**
+ * If true, will log debug information
+ */
  debug: boolean = false;

  constructor(context: Context, peer: PeerHandle) {
@@ -342,25 +484,31 @@
  if (_steam) {
  const calls = this._sendingStreams.get(_steam);
  if (calls) {
- if (this.debug)
- console.log("Closing calls", calls);
  for (const call of calls) {
  call.close();
  }
+ calls.length = 0;
  }
  this._sendingStreams.delete(_steam);
  if (calls && this.debug)
- console.log("Currently sending", this._sendingStreams);
+ this.debugLogCurrentState();
  }
+ this.updateSendingCalls();
  }

  // private onConnectRoomFn: Function = this.onConnectRoom.bind(this);
  // private onUserConnectedFn: Function = this.onUserConnected.bind(this);
  // private onUserLeftFn: Function = this.onUserLeft.bind(this);

+ private _enabled: boolean = false;
+
+ get enabled() { return this._enabled; }
+
  enable() {
+ if (this._enabled) return;
+ this._enabled = true;
  this.peer.enable();
- this.peer.addEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
+ this.peer.addEventListener(NetworkedStreamEvents.StreamReceived, this.onCallStreamReceived);
  //@ts-ignore
  this.peer.addEventListener(NetworkedStreamEvents.StreamEnded, this.onCallEnded);
  // this.peer.addEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
@@ -368,11 +516,15 @@
  this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
  this.context.connection.beginListen(RoomEvents.UserJoinedRoom, this.onJoinedRoom);
  this.context.connection.beginListen(RoomEvents.UserLeftRoom, this.onUserLeft);
+ this.context.connection.beginListen(RoomEvents.LeftRoom, this.onLeftRoom);
+ this._tickIntervalId = setInterval(this.tick, 5_000);
  }

  disable() {
+ if (!this._enabled) return;
+ this._enabled = false;
  this.peer.disable();
- this.peer.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
+ this.peer.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onCallStreamReceived);
  //@ts-ignore
  this.peer.removeEventListener(NetworkedStreamEvents.StreamEnded, this.onCallEnded);
  // this.peer.removeEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
@@ -380,8 +532,19 @@
  this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
  this.context.connection.stopListen(RoomEvents.UserJoinedRoom, this.onJoinedRoom);
  this.context.connection.stopListen(RoomEvents.UserLeftRoom, this.onUserLeft);
+ this.context.connection.stopListen(RoomEvents.LeftRoom, this.onLeftRoom);
+ if (this._tickIntervalId != undefined) {
+ clearInterval(this._tickIntervalId);
+ this._tickIntervalId = undefined;
+ }
  }

+ private _tickIntervalId?: number;
+
+ private tick = () => {
+ this.updateSendingCalls();
+ }
+
  // private onUserJoinedPeer = (evt) => {
  // if (!this.context.connection.isConnected && evt.userId) {
  // this.startCallWithUserIfNotAlready(evt.userId);
@@ -392,32 +555,57 @@
  // and if the user that joined is not yet receiving our video stream we want to start a stream with them
  // https://github.com/needle-tools/needle-tiny/issues/697#issuecomment-1510425539
  private onJoinedRoom = (evt) => {
- if (this.debug) console.log(`${evt.userId} joined room and I'm currently sending ${this._sendingStreams.size} streams`);
- if (this._sendingStreams.size > 0)
+ if (this._sendingStreams.size > 0) {
+ if (this.debug) console.warn(`${evt?.userId ? `User ${evt.userId}` : "You"} joined room`, evt, this._sendingStreams.size);
  this.updateSendingCalls();
+ }
  }
+ /** This is when the local user leaves the room */
+ private onLeftRoom = (evt) => {
+ if (this.debug) console.warn(`${evt?.userId || "You"} left room`, evt);
+ this.stopCallsToUsersThatAreNotInTheRoomAnymore();
+ this.peer.closeAll();

- private onReceiveStream = (evt) => {
- if (this.debug)
- console.log("RECEIVE STREAM", evt);
+ }
+
+ private onCallStreamReceived = (evt: StreamReceivedEvent) => {
+ if (this.debug) console.log("Call with " + evt.userId + " started");
  this.dispatchEvent({ type: NetworkedStreamEvents.StreamReceived, target: this, stream: evt.stream, userId: evt.userId });
+ if (this.debug) {
+ this.debugLogCurrentState();
+ }
  }

  private onCallEnded = (evt: StreamEndedEvent) => {
- this.dispatchEvent(evt)
+ if (this.debug) console.log("Call with " + evt.userId + " ended");
+ this.dispatchEvent(evt);
+ if (this.debug) {
+ this.debugLogCurrentState();
+ }
  }

  private onUserConnected = (user: PeerUserConnectedModel) => {
  // console.log(this.peer.id, user.guid)
  if (this.peer.id === user.guid) {
  if (this.debug)
- console.log("USER CONNECTED", user.guid, user);
+ console.log("PEER USER CONNECTED", user.guid, user, this._sendingStreams.size);
  const stream = this._sendingStreams.keys().next().value;
+
+ // check if we already have a call with this user
+ // const existing = this._outgoingCalls.find(c => c.call.peer === peerId && c.stream === stream);
+ // if (existing) {
+ // console.warn("Already have a call with this user", peerId, stream);
+ // return existing;
+ // }
  this.peer.makeCall(user.peerId, stream);
  }
+ else {
+ if (debug) console.log("Unknown user connected", user.guid, user.peerId)
+ }
  }

  private onUserLeft = (_: UserJoinedOrLeftRoomModel) => {
+ if (this.debug) console.log("User left room: " + _.userId);
  this.stopCallsToUsersThatAreNotInTheRoomAnymore();
  }

@@ -428,24 +616,23 @@
  const calls = this._sendingStreams.get(stream) || [];
  for (const userId of this.context.connection.usersInRoom()) {
  if (userId === localUserId) continue;
- const existing = calls.find(c => c.userId === userId);
- if (!existing || existing.stream?.active === false) {
- if (this.debug)
- console.log("Starting call to", userId, localUserId)
- const handle = this.peer.makeCall(this.peer.getPeerIdFromUserId(userId), stream);
+ const peerId = this.peer.getPeerIdFromUserId(userId);
+ const existing = calls.find(c => c.peerId === peerId && c.direction === CallDirection.Outgoing && !c.isClosed && c.stream?.active);
+ if (!existing) {
+ const handle = this.peer.makeCall(peerId, stream);
  if (handle) {
  startedNewCall = true;
  calls.push(handle);
  }
  }
+ else if (debug) {
+ console.debug("Already have a call with user " + userId + " / peer " + peerId);
+ }
  }

  this._sendingStreams.set(stream, calls);
  }
  this.stopCallsToUsersThatAreNotInTheRoomAnymore();
- if (startedNewCall && this.debug) {
- console.log("Currently sending", this._sendingStreams);
- }
  }

  // private startCallWithUserIfNotAlready(userId: string) {
@@ -465,19 +652,38 @@
  // }

  private stopCallsToUsersThatAreNotInTheRoomAnymore() {
+
  for (const stream of this._sendingStreams.keys()) {
  const calls = this._sendingStreams.get(stream);
  if (!calls) continue;
  for (let i = calls.length - 1; i >= 0; i--) {
  const call = calls[i];
  if (!this.context.connection.userIsInRoom(call.userId)) {
+ if (debug) console.log(`Remove call ${[i]} to user that is not in room anymore ${call.userId}`);
  call.close();
  calls.splice(i, 1);
  }
+ else if (debug) {
+ if (this.context.connection.connectionId === call.userId)
+ console.warn(`You are still in the room [${i}] ${call.userId}`);
+ else {
+ console.log(`User is still in room [${i}] ${call.userId}`);
+ }
+ }
  }
  }
+
+ this.peer.updateCalls();
+
+ if (this.debug) {
+ this.debugLogCurrentState();
+ }
  }

+ private debugLogCurrentState() {
+ console.warn(`You (${this.context.connection.connectionId}) are currently sending ${this._sendingStreams.size} and receiving ${this.peer.incomingCalls.length} calls (${this.peer.incomingCalls.map(c => c.userId).join(", ")})`, this.peer.incomingCalls);
+ }
+
  // const call = peer.call(peerId, stream);
  }

src/engine/webcomponents/icons.ts CHANGED
@@ -11,7 +11,8 @@
  span.style.maxWidth = "48px";
  span.style.maxHeight = "48px";
  span.style.overflow = "hidden";
- span.classList.add("material-symbols-outlined");
+ span.classList.add("material-symbols-outlined", "notranslate");
+ span.setAttribute("translate", "no");
  span.innerText = str;
  return span;
  }
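Context for the change above: Material Symbols icons are rendered as ligature text ("mic", "play_arrow", ...), so in-browser machine translation can rewrite the text node and break the icon. The `notranslate` class and `translate="no"` attribute opt the span out. A small stand-alone sketch of the same pattern:

```typescript
// Stand-alone illustration of the fix above: exclude icon ligature text from
// machine translation so e.g. Chrome's auto-translate cannot replace it.
function createIconSpan(name: string): HTMLSpanElement {
    const span = document.createElement("span");
    span.classList.add("material-symbols-outlined", "notranslate");
    span.setAttribute("translate", "no");
    span.innerText = name;
    return span;
}

document.body.appendChild(createIconSpan("mic_off"));
```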
src/engine/webcomponents/needle menu/needle-menu.ts CHANGED
@@ -1,4 +1,3 @@
- import { isDevEnvironment } from "../../debug/index.js";
  import type { Context } from "../../engine_context.js";
  import { hasCommercialLicense, hasProLicense, onLicenseCheckResultChanged } from "../../engine_license.js";
  import { isLocalNetwork } from "../../engine_networking_utils.js";
@@ -47,6 +46,36 @@
  class?: "row2";
  }

+ /**
+ * The NeedleMenu is a menu that can be displayed in the needle engine webcomponent or in VR/AR sessions.
+ * The menu can be used to add buttons to the needle engine that can be used to interact with the application.
+ * The menu can be positioned at the top or the bottom of the needle engine webcomponent
+ *
+ * @example Create a button using the NeedleMenu
+ * ```typescript
+ * onStart(ctx => {
+ * ctx.menu.appendChild({
+ * label: "Open Google",
+ * icon: "google",
+ * onClick: () => { window.open("https://www.google.com", "_blank") }
+ * });
+ * })
+ * ```
+ *
+ * Buttons can be added to the menu using the {@link NeedleMenu#appendChild} method or by sending a postMessage event to the needle engine with the type "needle:menu". Use the {@link NeedleMenuPostMessageModel} model to create buttons with postMessage.
+ * @example Create a button using a postmessage
+ * ```javascript
+ * window.postMessage({
+ * type: "needle:menu",
+ * button: {
+ * label: "Open Google",
+ * icon: "google",
+ * onclick: "https://www.google.com",
+ * target: "_blank",
+ * }
+ * }, "*");
+ * ```
+ */
  export class NeedleMenu {
  private readonly _context: Context;
  private readonly _menu: NeedleMenuElement;
@@ -512,12 +541,11 @@
  font-size: 1.2rem;
  padding: .6rem .5rem;
  }
- .compact .logo {
- padding-left: 0;
- margin-left: 0.3rem;
- }
  .compact.has-options .logo {
  border: none;
+ padding-left: 0;
+ margin-left: 1rem;
+ margin-bottom: .02rem;
  }
  .compact .options > button {
  display: flex;
@@ -568,7 +596,7 @@
  </div>
  </div>
  <div class="logo">
- <span class="madewith">powered by</span>
+ <span class="madewith notranslate">powered by</span>
  </div>
  </div>
  <button class="compact-menu-button"></button>
src/engine-components/SpriteRenderer.ts CHANGED
@@ -306,7 +306,7 @@
  }
  this.sharedMaterial = mat;
  this._currentSprite = new Mesh(SpriteUtils.getOrCreateGeometry(sprite), mat);
- this._currentSprite.renderOrder = this.renderOrder;
+ this._currentSprite.renderOrder = Math.round(this.renderOrder);
  NEEDLE_progressive.assignTextureLOD(mat, 0);
  }
  else {
src/engine-components/VideoPlayer.ts CHANGED
@@ -44,11 +44,28 @@
  MaterialOverride = 3,
  }

+ /**
+ * The VideoPlayer component can be used to playback video clips from urls, streams or m3u8 playlists (livestreams)
+ * @example Add a video player component to a game object and set the url to a video file. The video will start playing once the object becomes active in your scene
+ * ```typescript
+ * // Add a video player component to a game object and set the url to a video file. The video will start playing once the object becomes active in your scene
+ * const videoPlayer = addComponent(obj, VideoPlayer, {
+ * url: "https://www.w3schools.com/html/mov_bbb.mp4",
+ * playOnAwake: true,
+ * });
+ * ```
+ */
  export class VideoPlayer extends Behaviour {

+ /**
+ * When true the video will start playing as soon as the component is enabled
+ */
  @serializable()
  playOnAwake: boolean = true;

+ /**
+ * The aspect mode to use for the video. If
+ */
  @serializable()
  aspectMode: AspectMode = AspectMode.None;

@@ -58,8 +75,15 @@
  // set a default src, this should not be undefined
  @serializable()
  private source: VideoSource = VideoSource.Url;
+
+ /**
+ * The video clip url to play.
+ */
  @serializable(URL)
  get url() { return this._url }
+ /**
+ * The video clip to play.
+ */
  set url(val: string | null) {
  const prev = this._url;
  const changed = prev !== val;
@@ -89,10 +113,17 @@
  private time: number = 0;

  private _playbackSpeed: number = 1;
+ /**
+ * Get the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
+ * @default 1
+ */
  @serializable()
  get playbackSpeed(): number {
  return this._videoElement?.playbackRate ?? this._playbackSpeed;
  }
+ /**
+ * Set the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
+ */
  set playbackSpeed(val: number) {
  this._playbackSpeed = val;
  if (this._videoElement)
@@ -110,9 +141,15 @@
  this._videoElement.loop = val;
  }

+ /**
+ * @returns the current time of the video in seconds
+ */
  get currentTime(): number {
  return this._videoElement?.currentTime ?? this.time;
  }
+ /**
+ * set the current time of the video in seconds
+ */
  set currentTime(val: number) {
  if (this._videoElement) {
  this._videoElement.currentTime = val;
@@ -120,6 +157,9 @@
  else this.time = val;
  }

+ /**
+ * @returns true if the video is currently playing
+ */
  get isPlaying(): boolean {
  const video = this._videoElement;
  if (video) {
@@ -145,30 +185,58 @@
  }
  }

+ /**
+ * the material that is used to render the video
+ */
  get videoMaterial() {
  if (!this._videoMaterial) if (!this.create(false)) return null;
  return this._videoMaterial;
  }

+ /**
+ * the video texture that is used to render the video
+ */
  get videoTexture() {
  if (!this._videoTexture) if (!this.create(false)) return null;
  return this._videoTexture;
  }

+ /**
+ * the HTMLVideoElement that is used to play the video
+ */
  get videoElement() {
  if (!this._videoElement) if (!this.create(false)) return null;
  return this._videoElement!;
  }

+ /**
+ * Request the browser to enter picture in picture mode
+ * @link https://developer.mozilla.org/en-US/docs/Web/API/Picture-in-Picture_API
+ * @returns the promise returned by the browser
+ */
+ requestPictureInPicture() {
+ if (this._videoElement) return this._videoElement.requestPictureInPicture();
+ return null;
+ }
+
+ /**
+ * @returns true if the video is muted
+ */
  get muted() {
  return this._videoElement?.muted ?? this._muted;
  }
+ /**
+ * set the video to be muted
+ */
  set muted(val: boolean) {
  this._muted = val;
  if (this._videoElement) this._videoElement.muted = val;
  }
  private _muted: boolean = false;

+ /**
+ * The current video clip that is being played
+ */
  get currentVideo() {
  return this.clip;
  }
@@ -182,10 +250,11 @@
  }
  }
  private get audioOutputMode() { return this._audioOutputMode; }
-
  private _audioOutputMode: VideoAudioOutputMode = VideoAudioOutputMode.Direct;

- /** Set this to false to pause video playback while the tab is not active */
+ /** Set this to false to pause video playback while the tab is not active
+ * @default true
+ */
  playInBackground: boolean = true;

  private _crossOrigin: string | null = "anonymous";
@@ -241,6 +310,7 @@
  }
  }

+ /** @internal */
  onEnable(): void {
  if (debug) console.log("VideoPlayer.onEnable", VideoSource[this.source], this.clip, this.url, this)
  window.addEventListener('visibilitychange', this.visibilityChanged);
@@ -258,6 +328,7 @@
  else this._overlay?.stop();
  }

+ /** @internal */
  onDisable(): void {
  window.removeEventListener('visibilitychange', this.visibilityChanged);
  this._overlay?.stop();
@@ -278,6 +349,7 @@
  }
  }

+ /** @internal */
  onDestroy(): void {
  if (this._videoElement) {
  this.videoElement?.remove();
@@ -291,6 +363,9 @@

  private _receivedInput: boolean = false;

+ /**
+ * @internal
+ */
  constructor() {
  super();
  awaitInput(() => {
@@ -308,7 +383,7 @@
  }
  }

- /** start playing video source */
+ /** start playing the video source */
  play() {
  if (!this._videoElement) this.create(false);
  if (!this._videoElement) {
@@ -340,6 +415,9 @@
  if (debug) console.log("play", this._videoElement, this.time);
  }

+ /**
+ * Stop the video playback. This will reset the video to the beginning
+ */
  stop() {
  this._isPlaying = false;
  this.time = 0;
@@ -349,6 +427,9 @@
  if (debug) console.log("STOP", this);
  }

+ /**
+ * Pause the video playback
+ */
  pause(): void {
  this.time = this._videoElement?.currentTime ?? 0;
  this._isPlaying = false;
@@ -416,10 +497,13 @@

  private _overlay: VideoOverlay | null = null;

+ /**
+ * If true the video will be rendered in screenspace mode and overlayed on top of the scene.
+ * Alternatively you can also request the video to be played in PictureInPicture mode by calling `requestPictureInPicture()`
+ */
  get screenspace(): boolean {
  return this._overlay?.enabled ?? false;
  }
-
  set screenspace(val: boolean) {
  if (val) {
  if (!this._videoTexture) return;
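
A short usage sketch of the documented API above, including the new `requestPictureInPicture()`. Assumptions: `onStart`, `addComponent` and the menu button shape are taken from the doc comments in this changeset, the import path is the package entry point, and the icon name and video URL are placeholders:

```typescript
import { addComponent, onStart, VideoPlayer } from "@needle-tools/engine";

onStart(ctx => {
    const videoPlayer = addComponent(ctx.scene, VideoPlayer, {
        url: "https://www.w3schools.com/html/mov_bbb.mp4",
        playOnAwake: true,
    });

    ctx.menu.appendChild({
        label: "Picture in Picture",
        icon: "picture_in_picture", // placeholder icon name
        // requestPictureInPicture() forwards to the underlying HTMLVideoElement
        onClick: () => videoPlayer.requestPictureInPicture()?.catch(err => console.warn(err)),
    });
});
```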
src/engine-components/Voip.ts CHANGED
@@ -1,11 +1,13 @@
  import { AudioAnalyser } from "three";

  import { isDevEnvironment, showBalloonError, showBalloonWarning } from "../engine/debug/index.js";
+ import { Application } from "../engine/engine_application.js";
  import { RoomEvents } from "../engine/engine_networking.js";
  import { disposeStream, NetworkedStreamEvents, NetworkedStreams, StreamEndedEvent, StreamReceivedEvent } from "../engine/engine_networking_streams.js"
  import { serializable } from "../engine/engine_serialization_decorator.js";
  import { getParam, microphonePermissionsGranted } from "../engine/engine_utils.js";
  import { delay } from "../engine/engine_utils.js";
+ import { getIconElement } from "../engine/webcomponents/icons.js";
  import { Behaviour } from "./Component.js";

  export const noVoip = "noVoip";
@@ -14,7 +16,6 @@
  /**
  * The voice over ip component (Voip) allows you to send and receive audio streams to other users in the same networked room.
  * It requires a networking connection to be working (e.g. by having an active SyncedRoom component in the scene or by connecting to a room manually).
-
  */
  export class Voip extends Behaviour {

@@ -31,27 +32,54 @@
  @serializable()
  runInBackground: boolean = true;

+ /**
+ * When enabled, a menu button will be created to allow the user to toggle VOIP on and off
+ */
+ @serializable()
+ createMenuButton: boolean = true;
+
+ /**
+ * When enabled debug messages will be printed to the console. This is useful for debugging audio issues. You can also append ?debugvoip to the URL to enable this.
+ */
  debug: boolean = false;

- private _net!: NetworkedStreams;
+ private _net?: NetworkedStreams;
+ private _menubutton?: HTMLElement;

  /** @internal */
  awake() {
  if (debugParam) this.debug = true;
  if (this.debug) {
+ console.log("VOIP debugging: press 'v' to toggle mute or 'c' to toggle connect/disconnect");
  window.addEventListener("keydown", async (evt) => {
- if (evt.key === "v") {
- console.log("MUTE?", !this.isMuted)
- this.setMuted(!this.isMuted);
+ const key = evt.key.toLowerCase();
+ switch (key) {
+ case "v":
+ console.log("MUTE?", !this.isMuted)
+ this.setMuted(!this.isMuted);
+ break;
+ case "c":
+ if (this.isSending) this.disconnect();
+ else this.connect();
+ break;
  }
  });
+ // mute unfocused
+ window.addEventListener("blur", () => {
+ console.log("VOIP: MUTE ON BLUR")
+ this.setMuted(true);
+ });
+ window.addEventListener("focus", () => {
+ console.log("VOIP: UNMUTE ON FOCUS")
+ this.setMuted(false);
+ });
  }
  }

  /** @internal */
  onEnable(): void {
  if (!this._net) this._net = NetworkedStreams.create(this);
- // this._net.debug = this.debug;
+ if (this.debug) this._net.debug = true;
  this._net.addEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
  this._net.addEventListener(NetworkedStreamEvents.StreamEnded, this.onStreamEnded);
  this._net.enable();
@@ -62,52 +90,71 @@
  this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
  this.context.connection.beginListen(RoomEvents.LeftRoom, this.onLeftRoom);
  this.onEnabledChanged();
-
+ this.updateButton();
  window.addEventListener("visibilitychange", this.onVisibilityChanged);
  }

  /** @internal */
  onDisable(): void {
- this._net.stopSendingStream(this._outputStream);
- //@ts-ignore
- this._net.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
- //@ts-ignore
- this._net.removeEventListener(NetworkedStreamEvents.StreamEnded, this.onStreamEnded)
- this._net?.disable();
+ if (this._net) {
+ this._net.stopSendingStream(this._outputStream);
+ //@ts-ignore
+ this._net.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
+ //@ts-ignore
+ this._net.removeEventListener(NetworkedStreamEvents.StreamEnded, this.onStreamEnded)
+ this._net?.disable();
+ }
  this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
  this.context.connection.stopListen(RoomEvents.LeftRoom, this.onLeftRoom);
  this.onEnabledChanged();
-
+ this.updateButton();
  window.removeEventListener("visibilitychange", this.onVisibilityChanged);
  }

+ /** @internal */
+ onDestroy(): void {
+ this._menubutton?.remove();
+ this._menubutton = undefined;
+ }
+
+ /** Set via the mic button (e.g. when the websocket connection closes and rejoins but the user was muted before we don't want to enable VOIP again automatically) */
+ private _allowSending = true;
  private _outputStream: MediaStream | null = null;

  /**
- * Returns true if the component is currently sending audio
+ * @returns true if the component is currently sending audio
  */
  get isSending() { return this._outputStream != null && this._outputStream.active; }

- /** Start sending audio */
+
+ /** Start sending audio. */
  async connect(audioSource?: MediaTrackConstraints) {
+ if (!this._net) {
+ console.error("Cannot connect to voice chat - NetworkedStreams not initialized. Make sure the component is enabled before calling this method.");
+ return false;
+ }
  if (!this.context.connection.isConnected) {
  console.error("Cannot connect to voice chat - not connected to server");
+ this.updateButton();
  return false;
  }
  else if (!await microphonePermissionsGranted()) {
  console.error("Cannot connect to voice chat - microphone permissions not granted");
+ this.updateButton();
  return false;
  }
-
- this._net.stopSendingStream(this._outputStream);
+ this._allowSending = true;
+ this._net?.stopSendingStream(this._outputStream);
  disposeStream(this._outputStream);
  this._outputStream = await this.getAudioStream(audioSource);
  if (this._outputStream) {
  if (this.debug) console.log("VOIP: Got audio stream");
- this._net.startSendingStream(this._outputStream);
+ this._net?.startSendingStream(this._outputStream);
+ this.updateButton();
  return true;
  }
  else {
+ this.updateButton();
  if (!await microphonePermissionsGranted()) {
  showBalloonError("Microphone permissions not granted: Please grant microphone permissions to use voice chat");
  }
@@ -117,15 +164,19 @@
  return false;
  }

- /** Stop sending audio */
- disconnect() {
- this._net.stopSendingStream(this._outputStream);
+ /** Stop sending audio (muting your own microphone) */
+ disconnect(opts?: { remember: boolean }) {
+ if (opts?.remember) {
+ this._allowSending = false;
+ }
+ this._net?.stopSendingStream(this._outputStream);
  disposeStream(this._outputStream);
  this._outputStream = null;
+ this.updateButton();
  }

  /**
- * Mute or unmute the audio stream
+ * Mute or unmute the audio stream (this will only mute incoming streams and not mute your own microphone. Use disconnect() to mute your own microphone)
  */
  setMuted(mute: boolean) {
  const audio = this._outputStream?.getAudioTracks();
@@ -148,6 +199,50 @@
  return false;
  }

+ private async updateButton() {
+ if (this.createMenuButton) {
+ if (!this._menubutton) {
+ this._menubutton = document.createElement("button");
+ this._menubutton.addEventListener("click", () => {
+ if (this.isSending) {
+ this.disconnect({ remember: true });
+ }
+ else this.connect();
+ microphonePermissionsGranted().then(res => {
+ if (!res) showBalloonWarning("<strong>Microphone permissions not granted</strong>. Please allow your browser to use the microphone to be able to talk. Click on the button on the left side of your browser's address bar to allow microphone permissions.");
+ })
+ });
+ }
+ if (this._menubutton) {
+ this.context.menu.appendChild(this._menubutton);
+ if (this.activeAndEnabled) {
+ this._menubutton.style.display = "";
+ }
+ else {
+ this._menubutton.style.display = "none";
+ }
+ this._menubutton.title = this.isSending ? "Click to disable your microphone" : "Click to enable your microphone";
+ let label = this.isSending ? "" : "";
+ let icon = this.isSending ? "mic" : "mic_off";
+ const hasPermission = await microphonePermissionsGranted();
+ if (!hasPermission) {
+ label = "No Permission";
+ icon = "mic_off";
+ this._menubutton.title = "Microphone permissions not granted. Please allow your browser to use the microphone to be able to talk. This can usually be done in the addressbar of the webpage.";
+ }
+ this._menubutton.innerText = label;
+ this._menubutton.prepend(getIconElement(icon));
+ if (this.context.connection.isConnected == false)
+ this._menubutton.setAttribute("disabled", "");
+ else
+ this._menubutton.removeAttribute("disabled");
+ }
+ }
+ else if (!this.activeAndEnabled) {
+ this._menubutton?.remove();
+ }
+ }
+
  // private _analyzer?: AudioAnalyser;

  /** @deprecated */
@@ -176,12 +271,12 @@
  return null;
  }

- const myStream = await navigator.mediaDevices.getUserMedia({ audio: audio ?? true, video: false })
+ const stream = await navigator.mediaDevices.getUserMedia({ audio: audio ?? true, video: false })
  .catch((err) => {
  console.warn("VOIP failed getting audio stream", err);
  return null;
  });
- return myStream;
+ return stream;
  }

  // we have to wait for the user to connect to a room when "auto connect" is enabled
@@ -189,15 +284,14 @@
  if (this.debug) console.log("VOIP: Joined room");
  // Wait a moment for user list to be populated
  await delay(300)
- if (this.autoConnect && !this.isSending) {
+ if (this.autoConnect && !this.isSending && this._allowSending) {
  this.connect();
  }
  }
  private onLeftRoom = () => {
- // if (this.debug)
- console.log("VOIP: Left room");
+ if (this.debug) console.log("VOIP: Left room");
  this.disconnect();
- for(const incoming of this._incomingStreams.values()) {
+ for (const incoming of this._incomingStreams.values()) {
  disposeStream(incoming.srcObject as MediaStream);
  }
  this._incomingStreams.clear();
@@ -209,17 +303,19 @@
  const userId = evt.target.userId;
  const stream = evt.stream;

- const existing = this._incomingStreams.get(userId);
- if (existing) {
- existing.srcObject = stream;
- existing.setAttribute("autoplay", "true");
+ let audioElement = this._incomingStreams.get(userId);
+ if (!audioElement) {
+ audioElement = new Audio()
+ this._incomingStreams.set(userId, audioElement);
  }
- else {
- const element = new Audio()
- this._incomingStreams.set(userId, element);
- element.setAttribute("autoplay", "true");
- element.srcObject = stream;
- }
+ audioElement.srcObject = stream;
+ audioElement.setAttribute("autoplay", "true");
+ // for mobile we need to wait for user interaction to play audio. Auto play doesnt work on android when the page is refreshed
+ Application.registerWaitForInteraction(() => {
+ audioElement?.play().catch((err) => {
+ console.error("VOIP: Failed to play audio", err);
+ });
+ })
  }

  private onStreamEnded = (evt: StreamEndedEvent) => {
@@ -235,14 +331,13 @@
  }
  }

-
  private onVisibilityChanged = () => {
  if (this.runInBackground) return;
  const visible = document.visibilityState === "visible";
  const muted = !visible;
  this.setMuted(muted);
- for (const key of this._incomingStreams) {
- const str = key[1];
+ for (const element of this._incomingStreams) {
+ const str = element[1];
  str.muted = muted;
  }
  };
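
To tie the pieces together, a sketch of driving the updated Voip API from user code. Assumptions: `onStart`, `addComponent` and `Voip` are imported from the package entry point, and a networking connection (e.g. a SyncedRoom) is already active, since `connect()` returns `false` otherwise:

```typescript
import { addComponent, onStart, Voip } from "@needle-tools/engine";

onStart(async ctx => {
    const voip = addComponent(ctx.scene, Voip, {
        createMenuButton: true, // new in this release: adds a mic toggle to the Needle menu
        debug: false,           // or append ?debugvoip to the URL
    });

    // Start sending the microphone; requires an active room connection and mic permission.
    const ok = await voip.connect();
    if (!ok) console.warn("VOIP could not start sending audio");

    // disconnect({ remember: true }) stops sending and keeps it off across
    // reconnects, mirroring what the new menu button does on click.
    // voip.disconnect({ remember: true });
});
```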