@@ -3,7 +3,7 @@
|
|
3
3
|
import { MixerEvent } from "./Animator.js";
|
4
4
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
5
5
|
import { Mathf } from "../engine/engine_math.js";
|
6
|
-
import { Vec2 } from "../engine/engine_types.js";
|
6
|
+
import type { Vec2 } from "../engine/engine_types.js";
|
7
7
|
import { getParam } from "../engine/engine_utils.js";
|
8
8
|
|
9
9
|
const debug = getParam("debuganimation");
|
@@ -2,7 +2,7 @@
|
|
2
2
|
import { getParam } from "../../../../engine/engine_utils.js";
|
3
3
|
|
4
4
|
import { USDObject, buildMatrix } from "../ThreeUSDZExporter.js";
|
5
|
-
import { IUSDExporterExtension } from "../Extension.js";
|
5
|
+
import type { IUSDExporterExtension } from "../Extension.js";
|
6
6
|
|
7
7
|
import { Object3D, Matrix4, Vector3, Quaternion, Interpolant, AnimationClip, KeyframeTrack, PropertyBinding } from "three";
|
8
8
|
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
1
|
+
import type { GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
2
2
|
import { ContextEvent, ContextRegistry } from "../engine/engine_context_registry.js";
|
3
3
|
import { addNewComponent } from "../engine/engine_components.js";
|
4
4
|
import { Animator } from "./Animator.js";
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { Behaviour } from "./Component.js";
|
2
|
-
import { AnimationActionLoopStyles, AnimationAction, AnimationMixer } from "three";
|
2
|
+
import type { AnimationActionLoopStyles, AnimationAction, AnimationMixer } from "three";
|
3
3
|
import { getParam } from "../engine/engine_utils.js";
|
4
|
-
import { AnimatorControllerModel } from "../engine/extensions/NEEDLE_animator_controller_model.js";
|
4
|
+
import type { AnimatorControllerModel } from "../engine/extensions/NEEDLE_animator_controller_model.js";
|
5
5
|
import { AnimatorController } from "./AnimatorController.js";
|
6
6
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
7
7
|
import { Mathf } from "../engine/engine_math.js";
|
@@ -1,5 +1,6 @@
|
|
1
1
|
import { Animator } from "./Animator.js";
|
2
|
-
import {
|
2
|
+
import type { AnimatorControllerModel, Condition, State, Transition } from "../engine/extensions/NEEDLE_animator_controller_model.js";
|
3
|
+
import { AnimatorConditionMode, AnimatorControllerParameterType, AnimatorStateInfo, createMotion, StateMachineBehaviour } from "../engine/extensions/NEEDLE_animator_controller_model.js";
|
3
4
|
import { AnimationAction, AnimationClip, AnimationMixer, AxesHelper, Euler, KeyframeTrack, LoopOnce, Object3D, Quaternion, Vector3 } from "three";
|
4
5
|
import { deepClone, getParam } from "../engine/engine_utils.js";
|
5
6
|
import { Context } from "../engine/engine_setup.js";
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { EdgeDetectionMode, SMAAEffect, SMAAPreset } from "postprocessing";
|
2
2
|
import { serializable } from "../../../engine/engine_serialization.js";
|
3
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
3
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
4
4
|
import { VolumeParameter } from "../VolumeParameter.js";
|
5
5
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
6
6
|
|
@@ -24,6 +24,7 @@
|
|
24
24
|
export { syncField } from "./engine_networking_auto.js";
|
25
25
|
export * from "./engine_networking_files.js";
|
26
26
|
export * from "./engine_networking_instantiate.js";
|
27
|
+
export * from "./engine_networking_streams.js";
|
27
28
|
export * from "./engine_networking_utils.js";
|
28
29
|
export * from "./engine_networking_peer.js";
|
29
30
|
export * from "./engine_patcher.js";
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { GameObject } from "../../../../Component.js";
|
2
|
-
import { IUSDExporterExtension } from "../../Extension.js";
|
2
|
+
import type { IUSDExporterExtension } from "../../Extension.js";
|
3
3
|
import { USDObject, USDWriter, USDZExporterContext } from "../../ThreeUSDZExporter.js";
|
4
4
|
import { Object3D } from "three";
|
5
5
|
import { AudioSource } from "../../../../AudioSource.js";
|
@@ -19,6 +19,7 @@
|
|
19
19
|
for (const audioSource of audioSources) {
|
20
20
|
|
21
21
|
if (!audioSource.clip) continue;
|
22
|
+
if(typeof audioSource.clip !== "string") continue;
|
22
23
|
|
23
24
|
// do nothing if this audio source is not set to play on awake -
|
24
25
|
// should be controlled via PlayAudioOnClick instead then.
|
@@ -4,7 +4,7 @@
|
|
4
4
|
import * as utils from "../engine/engine_utils.js";
|
5
5
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
6
6
|
import { ApplicationEvents } from "../engine/engine_application.js";
|
7
|
-
import { AudioLoader, PositionalAudio } from "three";
|
7
|
+
import { Audio, AudioContext, AudioLoader, PositionalAudio } from "three";
|
8
8
|
|
9
9
|
|
10
10
|
const debug = utils.getParam("debugaudio");
|
@@ -65,7 +65,7 @@
|
|
65
65
|
if (fn == undefined) return;
|
66
66
|
if (AudioSource._userInteractionRegistered) return;
|
67
67
|
AudioSource._userInteractionRegistered = true;
|
68
|
-
if(debug) console.log("🔊 registered interaction, can play audio now");
|
68
|
+
if (debug) console.log("🔊 registered interaction, can play audio now");
|
69
69
|
document.removeEventListener('pointerdown', fn);
|
70
70
|
document.removeEventListener('click', fn);
|
71
71
|
document.removeEventListener('dragstart', fn);
|
@@ -83,7 +83,7 @@
|
|
83
83
|
}
|
84
84
|
|
85
85
|
@serializable(URL)
|
86
|
-
clip: string = "";
|
86
|
+
clip: string | MediaStream = "";
|
87
87
|
|
88
88
|
@serializable()
|
89
89
|
playOnAwake: boolean = false;
|
@@ -97,6 +97,7 @@
|
|
97
97
|
this._loop = val;
|
98
98
|
if (this.sound) this.sound.setLoop(val);
|
99
99
|
}
|
100
|
+
/** 0 = 2D, 1 = 3D */
|
100
101
|
@serializable()
|
101
102
|
get spatialBlend(): number {
|
102
103
|
return this._spatialBlend;
|
@@ -154,7 +155,8 @@
|
|
154
155
|
private shouldPlay: boolean = false;
|
155
156
|
// set this from audio context time, used to set clip offset when setting "time" property
|
156
157
|
// there is maybe a better way to set a audio clip current time?!
|
157
|
-
private _lastClipStartedLoading: string | null = null;
|
158
|
+
private _lastClipStartedLoading: string | MediaStream | null = null;
|
159
|
+
private _audioElement: HTMLAudioElement | null = null;
|
158
160
|
|
159
161
|
public get Sound(): PositionalAudio | null {
|
160
162
|
if (!this.sound && AudioSource._userInteractionRegistered) {
|
@@ -164,7 +166,7 @@
|
|
164
166
|
this.sound = new PositionalAudio(listener.listener);
|
165
167
|
this.gameObject.add(this.sound);
|
166
168
|
}
|
167
|
-
else if(debug) console.warn("No audio listener found in scene - can not play audio");
|
169
|
+
else if (debug) console.warn("No audio listener found in scene - can not play audio");
|
168
170
|
}
|
169
171
|
return this.sound;
|
170
172
|
}
|
@@ -181,7 +183,7 @@
|
|
181
183
|
if (!AudioSource._userInteractionRegistered) {
|
182
184
|
AudioSource._beginWaitForUserInteraction(() => {
|
183
185
|
if (this.enabled && !this.destroyed && this.shouldPlay)
|
184
|
-
this.
|
186
|
+
this.onNewClip(this.clip);
|
185
187
|
});
|
186
188
|
}
|
187
189
|
else if (this.playOnAwake && this.context.application.isVisible) {
|
@@ -225,7 +227,7 @@
|
|
225
227
|
|
226
228
|
private lerp = (x, y, a) => x * (1 - a) + y * a;
|
227
229
|
|
228
|
-
private
|
230
|
+
private createAudio = (buffer?: AudioBuffer) => {
|
229
231
|
if (debug) console.log("audio buffer loaded");
|
230
232
|
AudioSource.registerWaitForAllowAudio(() => {
|
231
233
|
if (debug)
|
@@ -239,9 +241,10 @@
|
|
239
241
|
if (sound.isPlaying)
|
240
242
|
sound.stop();
|
241
243
|
|
242
|
-
|
244
|
+
if (buffer)
|
245
|
+
sound.setBuffer(buffer);
|
243
246
|
sound.loop = this._loop;
|
244
|
-
if(this.context.application.muted) sound.setVolume(0);
|
247
|
+
if (this.context.application.muted) sound.setVolume(0);
|
245
248
|
else sound.setVolume(this.volume);
|
246
249
|
sound.autoplay = this.shouldPlay;
|
247
250
|
// sound.setDistanceModel('linear');
|
@@ -289,34 +292,49 @@
|
|
289
292
|
}
|
290
293
|
}
|
291
294
|
|
292
|
-
private
|
293
|
-
if (clip)
|
294
|
-
|
295
|
-
if (this.clip) {
|
295
|
+
private onNewClip(clip?: string | MediaStream) {
|
296
|
+
if (clip) this.clip = clip;
|
297
|
+
if (typeof clip === "string") {
|
296
298
|
if (debug)
|
297
|
-
console.log(
|
298
|
-
if (
|
299
|
+
console.log(clip);
|
300
|
+
if (clip.endsWith(".mp3") || clip.endsWith(".wav")) {
|
299
301
|
if (!this.audioLoader)
|
300
302
|
this.audioLoader = new AudioLoader();
|
301
303
|
this.shouldPlay = true;
|
302
|
-
if (this._lastClipStartedLoading ===
|
304
|
+
if (this._lastClipStartedLoading === clip) {
|
303
305
|
if (debug) console.log("Is currently loading:", this._lastClipStartedLoading, this)
|
304
306
|
return;
|
305
307
|
}
|
306
|
-
this._lastClipStartedLoading =
|
308
|
+
this._lastClipStartedLoading = clip;
|
307
309
|
if (debug)
|
308
|
-
console.log("load audio",
|
309
|
-
this.audioLoader.load(
|
310
|
+
console.log("load audio", clip);
|
311
|
+
this.audioLoader.load(clip, this.createAudio, () => { }, console.error);
|
310
312
|
}
|
313
|
+
else console.warn("Unsupported audio clip type", clip)
|
311
314
|
}
|
315
|
+
else {
|
316
|
+
this.shouldPlay = true;
|
317
|
+
this.createAudio();
|
318
|
+
}
|
312
319
|
}
|
313
320
|
|
314
|
-
|
315
|
-
|
316
|
-
|
321
|
+
/** Play a mediastream */
|
322
|
+
play(clip: string | MediaStream | undefined = undefined) {
|
323
|
+
// We only support strings and media stream
|
324
|
+
// TODO: maybe we should return here if an invalid value is passed in
|
325
|
+
if (clip !== undefined && typeof clip !== "string" && !(clip instanceof MediaStream)) {
|
326
|
+
console.warn("Called play on AudioSource with unknown argument type", clip)
|
327
|
+
clip = undefined;
|
328
|
+
}
|
317
329
|
|
318
|
-
|
319
|
-
|
330
|
+
// Check if we need to call load first
|
331
|
+
let needsLoading = !this.sound || (clip && clip !== this.clip);
|
332
|
+
if (typeof clip === "string" && !this.audioLoader) needsLoading = true;
|
333
|
+
if (clip instanceof MediaStream || typeof clip === "string")
|
334
|
+
this.clip = clip;
|
335
|
+
if (needsLoading) {
|
336
|
+
this.shouldPlay = true;
|
337
|
+
this.onNewClip(clip);
|
320
338
|
return;
|
321
339
|
}
|
322
340
|
|
@@ -326,8 +344,28 @@
|
|
326
344
|
console.log("play", this.sound?.getVolume(), this.sound);
|
327
345
|
if (this.sound && !this.sound.isPlaying) {
|
328
346
|
const muted = this.context.application.muted;
|
329
|
-
if(muted) this.sound.setVolume(0);
|
330
|
-
|
347
|
+
if (muted) this.sound.setVolume(0);
|
348
|
+
|
349
|
+
if (this.clip instanceof MediaStream) {
|
350
|
+
|
351
|
+
// We have to set the audio element source to the mediastream as well
|
352
|
+
// otherwise it will not play for some reason...
|
353
|
+
this.sound.setMediaStreamSource(this.clip);
|
354
|
+
|
355
|
+
if (!this._audioElement) {
|
356
|
+
this._audioElement = document.createElement('audio');
|
357
|
+
this._audioElement.style.display = "none";
|
358
|
+
}
|
359
|
+
if (!this._audioElement.parentNode)
|
360
|
+
this.context.domElement.shadowRoot?.append(this._audioElement);
|
361
|
+
this._audioElement.srcObject = this.clip;
|
362
|
+
this._audioElement.autoplay = false;
|
363
|
+
|
364
|
+
}
|
365
|
+
else {
|
366
|
+
if (this._audioElement) this._audioElement.remove();
|
367
|
+
this.sound.play(muted ? .1 : 0);
|
368
|
+
}
|
331
369
|
}
|
332
370
|
}
|
333
371
|
|
@@ -339,6 +377,7 @@
|
|
339
377
|
this._lastContextTime = this.sound?.context.currentTime;
|
340
378
|
this.sound.pause();
|
341
379
|
}
|
380
|
+
this._audioElement?.remove();
|
342
381
|
}
|
343
382
|
|
344
383
|
stop() {
|
@@ -351,6 +390,7 @@
|
|
351
390
|
console.log(this._lastContextTime)
|
352
391
|
this.sound.stop();
|
353
392
|
}
|
393
|
+
this._audioElement?.remove();
|
354
394
|
}
|
355
395
|
|
356
396
|
private _lastContextTime: number = 0;
|
@@ -4,9 +4,8 @@
|
|
4
4
|
import { AvatarMarker } from "../webxr/WebXRAvatar.js";
|
5
5
|
import * as utils from "../../engine/engine_three_utils.js";
|
6
6
|
import { OwnershipModel } from "../../engine/engine_networking.js";
|
7
|
-
import { Int8BufferAttribute } from "three";
|
8
7
|
import { Context } from "../../engine/engine_setup.js";
|
9
|
-
import { IModel } from "../../engine/engine_networking_types.js";
|
8
|
+
import type { IModel } from "../../engine/engine_networking_types.js";
|
10
9
|
|
11
10
|
export class Avatar_POI {
|
12
11
|
|
@@ -4,8 +4,7 @@
|
|
4
4
|
import { EventSystem } from "./EventSystem.js";
|
5
5
|
import { showGizmos } from '../../engine/engine_default_parameters.js';
|
6
6
|
import { AxesHelper, Object3D } from 'three';
|
7
|
-
import { ICanvas
|
8
|
-
import { ShadowCastingMode } from '../Renderer.js';
|
7
|
+
import type { ICanvas } from './Interfaces.js';
|
9
8
|
import { getParam } from '../../engine/engine_utils.js';
|
10
9
|
export const includesDir = "./include";
|
11
10
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { GameObject } from "../../../../Component.js";
|
2
|
-
import { IContext } from "../../../../../engine/engine_types.js";
|
3
|
-
import { IUSDExporterExtension } from "../../Extension.js";
|
2
|
+
import type { IContext } from "../../../../../engine/engine_types.js";
|
3
|
+
import type { IUSDExporterExtension } from "../../Extension.js";
|
4
4
|
import { USDObject, USDWriter } from "../../ThreeUSDZExporter.js";
|
5
5
|
import { BehaviorModel } from "./BehavioursBuilder.js";
|
6
6
|
import { getParam } from "../../../../../engine/engine_utils.js";
|
@@ -2,15 +2,15 @@
|
|
2
2
|
import { Animator } from "../../../../Animator.js";
|
3
3
|
import { Renderer } from "../../../../Renderer.js";
|
4
4
|
import { serializable } from "../../../../../engine/engine_serialization_decorator.js";
|
5
|
-
import { IPointerClickHandler, PointerEventData } from "../../../../ui/PointerEvents.js";
|
6
|
-
import { RegisteredAnimationInfo, UsdzAnimation } from "../Animation.js";
|
5
|
+
import type { IPointerClickHandler, PointerEventData } from "../../../../ui/PointerEvents.js";
|
6
|
+
import { RegisteredAnimationInfo, type UsdzAnimation } from "../Animation.js";
|
7
7
|
import { getWorldPosition, getWorldQuaternion, getWorldScale, setWorldPosition, setWorldQuaternion, setWorldScale } from "../../../../../engine/engine_three_utils.js";
|
8
8
|
|
9
9
|
import { Object3D, Material, Vector3, Quaternion, Mesh, Group } from "three";
|
10
10
|
import { USDDocument, USDObject } from "../../ThreeUSDZExporter.js";
|
11
11
|
|
12
|
-
import { BehaviorExtension, UsdzBehaviour } from "./Behaviour.js";
|
13
|
-
import { ActionBuilder, ActionModel, AuralMode, BehaviorModel, IBehaviorElement, MotionType, PlayAction, Space, TriggerBuilder } from "./BehavioursBuilder.js";
|
12
|
+
import type { BehaviorExtension, UsdzBehaviour } from "./Behaviour.js";
|
13
|
+
import { ActionBuilder, ActionModel, AuralMode, BehaviorModel, type IBehaviorElement, MotionType, PlayAction, Space, TriggerBuilder } from "./BehavioursBuilder.js";
|
14
14
|
import { AudioSource } from "../../../../AudioSource.js";
|
15
15
|
import { NEEDLE_progressive } from "../../../../../engine/extensions/NEEDLE_progressive.js";
|
16
16
|
import { isDevEnvironment } from "../../../../../engine/debug/index.js";
|
@@ -496,6 +496,7 @@
|
|
496
496
|
|
497
497
|
const clipUrl = this.clip ? this.clip : this.target ? this.target.clip : undefined;
|
498
498
|
if (!clipUrl) return;
|
499
|
+
if(typeof clipUrl !== "string") return;
|
499
500
|
|
500
501
|
const playbackTarget = this.target ? this.target.gameObject : this.gameObject;
|
501
502
|
const clipName = clipUrl.split("/").pop();
|
@@ -513,6 +514,7 @@
|
|
513
514
|
if (!this.target && !this.clip) return;
|
514
515
|
const clipUrl = this.clip ? this.clip : this.target ? this.target.clip : undefined;
|
515
516
|
if (!clipUrl) return;
|
517
|
+
if(typeof clipUrl !== "string") return;
|
516
518
|
const clipName = clipUrl.split("/").pop();
|
517
519
|
|
518
520
|
const audio = await fetch(this.clip);
|
@@ -2,7 +2,7 @@
|
|
2
2
|
import { getParam } from "../engine/engine_utils.js";
|
3
3
|
import { CreateWireCube, Gizmos } from "../engine/engine_gizmos.js";
|
4
4
|
import { getWorldPosition, getWorldScale } from "../engine/engine_three_utils.js";
|
5
|
-
import { Box3, Color, ColorRepresentation, LineSegments, Object3D, Vector3 } from "three";
|
5
|
+
import { Box3, Color, type ColorRepresentation, LineSegments, Object3D, Vector3 } from "three";
|
6
6
|
|
7
7
|
const gizmos = getParam("gizmos");
|
8
8
|
const debug = getParam("debugboxhelper");
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { Behaviour, GameObject } from "../Component.js";
|
2
2
|
import { EventList } from "../EventList.js";
|
3
|
-
import { IPointerClickHandler, IPointerEnterHandler, IPointerEventHandler, IPointerExitHandler, PointerEventData } from "./PointerEvents.js";
|
3
|
+
import type { IPointerClickHandler, IPointerEnterHandler, IPointerEventHandler, IPointerExitHandler, PointerEventData } from "./PointerEvents.js";
|
4
4
|
import { Image } from "./Image.js";
|
5
5
|
import { RGBAColor } from "../js-extensions/RGBAColor.js";
|
6
6
|
import { serializable } from "../../engine/engine_serialization_decorator.js";
|
@@ -3,12 +3,12 @@
|
|
3
3
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
4
4
|
import { RGBAColor } from "./js-extensions/RGBAColor.js";
|
5
5
|
import { Context, XRSessionMode } from "../engine/engine_setup.js";
|
6
|
-
import { ICamera } from "../engine/engine_types.js"
|
6
|
+
import type { ICamera } from "../engine/engine_types.js"
|
7
7
|
import { isDevEnvironment, showBalloonMessage, showBalloonWarning } from "../engine/debug/index.js";
|
8
|
-
import { getWorldPosition
|
8
|
+
import { getWorldPosition } from "../engine/engine_three_utils.js";
|
9
9
|
import { Gizmos } from "../engine/engine_gizmos.js";
|
10
10
|
|
11
|
-
import { EquirectangularReflectionMapping, OrthographicCamera, PerspectiveCamera, Ray, SRGBColorSpace,
|
11
|
+
import { EquirectangularReflectionMapping, OrthographicCamera, PerspectiveCamera, Ray, SRGBColorSpace, Vector3 } from "three";
|
12
12
|
import { OrbitControls } from "./OrbitControls.js";
|
13
13
|
import { RenderTexture } from "../engine/engine_texture.js";
|
14
14
|
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { OrbitControls } from "./OrbitControls.js";
|
2
2
|
import { addNewComponent, getOrAddComponent } from "../engine/engine_components.js";
|
3
3
|
import { Object3D } from "three";
|
4
|
-
import { ICamera, IContext } from "../engine/engine_types.js";
|
4
|
+
import type { ICamera, IContext } from "../engine/engine_types.js";
|
5
5
|
import { RGBAColor } from "./js-extensions/RGBAColor.js";
|
6
6
|
import { ContextEvent, ContextRegistry } from "../engine/engine_context_registry.js";
|
7
7
|
import { getCameraController } from "../engine/engine_camera.js";
|
@@ -1,11 +1,11 @@
|
|
1
|
-
import {
|
1
|
+
import { updateRenderSettings as updateRenderSettingsRecursive } from "./Utils.js";
|
2
2
|
import { serializable } from "../../engine/engine_serialization_decorator.js";
|
3
3
|
import { FrameEvent } from "../../engine/engine_setup.js";
|
4
4
|
import { BaseUIComponent, UIRootComponent } from "./BaseUIComponent.js";
|
5
5
|
import { GameObject } from "../Component.js";
|
6
6
|
import { Matrix4, Object3D } from "three";
|
7
7
|
import { RectTransform } from "./RectTransform.js";
|
8
|
-
import { ICanvas, ICanvasEventReceiver, ILayoutGroup, IRectTransform } from "./Interfaces.js";
|
8
|
+
import type { ICanvas, ICanvasEventReceiver, ILayoutGroup, IRectTransform } from "./Interfaces.js";
|
9
9
|
import { Camera } from "../Camera.js";
|
10
10
|
import { EventSystem } from "./EventSystem.js";
|
11
11
|
import * as ThreeMeshUI from 'three-mesh-ui'
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { Graphic } from "./Graphic.js";
|
2
2
|
import { FrameEvent } from "../../engine/engine_setup.js";
|
3
3
|
import { Behaviour, GameObject } from "../Component.js";
|
4
|
-
import { ICanvasGroup, IHasAlphaFactor } from "./Interfaces.js";
|
4
|
+
import { type ICanvasGroup, type IHasAlphaFactor } from "./Interfaces.js";
|
5
5
|
import { serializable } from "../../engine/engine_serialization_decorator.js";
|
6
6
|
import { BaseUIComponent } from "./BaseUIComponent.js";
|
7
7
|
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { ChromaticAberrationEffect } from "postprocessing";
|
2
2
|
import { Vector2 } from "three";
|
3
3
|
import { serializable } from "../../../engine/engine_serialization.js";
|
4
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
4
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
5
5
|
import { VolumeParameter } from "../VolumeParameter.js";
|
6
6
|
import { registerCustomEffectType, VolumeProfile } from "../VolumeProfile.js";
|
7
7
|
|
@@ -1,11 +1,11 @@
|
|
1
1
|
import { Behaviour } from "./Component.js";
|
2
2
|
import { Rigidbody } from "./RigidBody.js";
|
3
3
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
4
|
-
import {
|
4
|
+
import { Group, Mesh, Vector3 } from "three"
|
5
5
|
// import { IColliderProvider, registerColliderProvider } from "../engine/engine_physics.js";
|
6
|
-
import { IBoxCollider, ICollider, ISphereCollider } from "../engine/engine_types.js";
|
6
|
+
import type { IBoxCollider, ICollider, ISphereCollider } from "../engine/engine_types.js";
|
7
7
|
import { getWorldScale } from "../engine/engine_three_utils.js";
|
8
|
-
import { PhysicsMaterial } from "../engine/engine_physics.types.js";
|
8
|
+
import type { PhysicsMaterial } from "../engine/engine_physics.types.js";
|
9
9
|
import { validate } from "../engine/engine_util_decorator.js";
|
10
10
|
import { unwatchWrite, watchWrite } from "../engine/engine_utils.js";
|
11
11
|
|
@@ -1,9 +1,9 @@
|
|
1
1
|
import { BrightnessContrastEffect, HueSaturationEffect } from "postprocessing";
|
2
2
|
import { serializable } from "../../../engine/engine_serialization.js";
|
3
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
3
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
4
4
|
import { VolumeParameter } from "../VolumeParameter.js";
|
5
5
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
6
|
-
import {
|
6
|
+
import { LinearToneMapping, NoToneMapping } from "three";
|
7
7
|
|
8
8
|
|
9
9
|
export class ColorAdjustments extends PostProcessingEffect {
|
@@ -4,7 +4,7 @@
|
|
4
4
|
import { Context, FrameEvent } from "../engine/engine_setup.js";
|
5
5
|
import * as main from "../engine/engine_mainloop_utils.js";
|
6
6
|
import { syncDestroy, syncInstantiate } from "../engine/engine_networking_instantiate.js";
|
7
|
-
import { ConstructorConcrete, SourceIdentifier, IComponent, IGameObject, Constructor, GuidsMap, Collision, ICollider } from "../engine/engine_types.js";
|
7
|
+
import type { ConstructorConcrete, SourceIdentifier, IComponent, IGameObject, Constructor, GuidsMap, Collision, ICollider } from "../engine/engine_types.js";
|
8
8
|
import { addNewComponent, destroyComponentInstance, findObjectOfType, findObjectsOfType, getComponent, getComponentInChildren, getComponentInParent, getComponents, getComponentsInChildren, getComponentsInParent, getOrAddComponent, moveComponentInstance, removeComponent } from "../engine/engine_components.js";
|
9
9
|
import { findByGuid, destroy, InstantiateOptions, instantiate, HideFlags, foreachComponent, markAsInstancedRendered, isActiveInHierarchy, isActiveSelf, isUsingInstancing, setActive, isDestroyed } from "../engine/engine_gameobject.js";
|
10
10
|
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { IUSDExporterExtension } from "../Extension.js";
|
1
|
+
import type { IUSDExporterExtension } from "../Extension.js";
|
2
2
|
|
3
3
|
export class DocumentExtension implements IUSDExporterExtension {
|
4
4
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { GameObject } from "./Component.js";
|
2
2
|
import { SyncedTransform } from "./SyncedTransform.js";
|
3
|
-
import { IPointerDownHandler, IPointerEnterHandler, IPointerEventHandler, IPointerExitHandler, IPointerUpHandler, PointerEventData } from "./ui/PointerEvents.js";
|
3
|
+
import type { IPointerDownHandler, IPointerEnterHandler, IPointerEventHandler, IPointerExitHandler, IPointerUpHandler, PointerEventData } from "./ui/PointerEvents.js";
|
4
4
|
import { Context } from "../engine/engine_setup.js";
|
5
5
|
import { Interactable, UsageMarker } from "./Interactable.js";
|
6
6
|
import { Rigidbody } from "./RigidBody.js";
|
@@ -8,7 +8,7 @@
|
|
8
8
|
import { Avatar_POI } from "./avatar/Avatar_Brain_LookAt.js";
|
9
9
|
import { RaycastOptions } from "../engine/engine_physics.js";
|
10
10
|
import { getWorldPosition, setWorldPosition } from "../engine/engine_three_utils.js";
|
11
|
-
import { KeyCode } from "../engine/engine_input.js";
|
11
|
+
import type { KeyCode } from "../engine/engine_input.js";
|
12
12
|
import { nameofFactory } from "../engine/engine_utils.js";
|
13
13
|
import { InstancingUtil } from "../engine/engine_instancing.js";
|
14
14
|
import { OrbitControls } from "./OrbitControls.js";
|
@@ -3,7 +3,7 @@
|
|
3
3
|
import * as files from "../engine/engine_networking_files.js";
|
4
4
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
5
5
|
import { Networking } from "../engine-components/Networking.js";
|
6
|
-
import { GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
6
|
+
import type { GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
7
7
|
import { getParam } from "../engine/engine_utils.js";
|
8
8
|
|
9
9
|
const debug = getParam("debugdroplistener");
|
@@ -6,9 +6,8 @@
|
|
6
6
|
import { registerPrefabProvider, syncInstantiate } from "./engine_networking_instantiate.js";
|
7
7
|
import { download } from "./engine_web_api.js";
|
8
8
|
import { getLoader } from "./engine_gltf.js";
|
9
|
-
import { IComponent, SourceIdentifier } from "./engine_types.js";
|
9
|
+
import type { IComponent, IGameObject, SourceIdentifier } from "./engine_types.js";
|
10
10
|
import { destroy, instantiate, InstantiateOptions, isDestroyed } from "./engine_gameobject.js";
|
11
|
-
import { IGameObject } from "./engine_types.js";
|
12
11
|
|
13
12
|
const debug = getParam("debugaddressables");
|
14
13
|
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { ICameraController } from "./engine_types.js";
|
1
|
+
import type { ICameraController } from "./engine_types.js";
|
2
2
|
import { Camera } from "three";
|
3
3
|
|
4
4
|
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { IComponent } from "./engine_types.js";
|
1
|
+
import { type IComponent } from "./engine_types.js";
|
2
2
|
import { getParam } from "./engine_utils.js";
|
3
3
|
|
4
4
|
export enum ComponentEvents {
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { Object3D, Scene } from "three";
|
2
|
-
import { Constructor, ConstructorConcrete, IComponent
|
2
|
+
import type { Constructor, ConstructorConcrete, IComponent, IGameObject } from "./engine_types.js";
|
3
3
|
import { Context, registerComponent } from "./engine_setup.js";
|
4
4
|
import { getParam } from "./engine_utils.js";
|
5
5
|
import { removeScriptFromContext, updateActiveInHierarchyWithoutEventCall } from "./engine_mainloop_utils.js";
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { IComponent, IContext, LoadedGLTF } from "./engine_types.js";
|
1
|
+
import { type IComponent, type IContext, type LoadedGLTF } from "./engine_types.js";
|
2
2
|
|
3
3
|
export enum ContextEvent {
|
4
4
|
/** called when the context is registered to the registry, the context is not fully initialized at this point */
|
@@ -2,7 +2,7 @@
|
|
2
2
|
BufferGeometry, Cache, Camera, Clock, Color, DepthTexture, Group,
|
3
3
|
Material, NearestFilter, NoToneMapping, Object3D, PCFSoftShadowMap,
|
4
4
|
PerspectiveCamera, RGBAFormat, Scene, SRGBColorSpace,
|
5
|
-
Texture, WebGLRenderer, WebGLRendererParameters, WebGLRenderTarget, WebXRArrayCamera
|
5
|
+
Texture, WebGLRenderer, type WebGLRendererParameters, WebGLRenderTarget, type WebXRArrayCamera
|
6
6
|
} from 'three'
|
7
7
|
import { Input } from './engine_input.js';
|
8
8
|
import { Physics } from './engine_physics.js';
|
@@ -22,10 +22,10 @@
|
|
22
22
|
import { RendererData as SceneLighting } from './engine_scenelighting.js';
|
23
23
|
import { Addressables } from './engine_addressables.js';
|
24
24
|
import { Application } from './engine_application.js';
|
25
|
-
import { LightDataRegistry, ILightDataRegistry } from './engine_lightdata.js';
|
25
|
+
import { LightDataRegistry, type ILightDataRegistry } from './engine_lightdata.js';
|
26
26
|
import { PlayerViewManager } from './engine_playerview.js';
|
27
27
|
|
28
|
-
import { CoroutineData, GLTF, ICamera, IComponent, IContext, ILight, LoadedGLTF } from "./engine_types.js"
|
28
|
+
import { type CoroutineData, type GLTF, type ICamera, type IComponent, type IContext, type ILight, type LoadedGLTF } from "./engine_types.js"
|
29
29
|
import { destroy, foreachComponent } from './engine_gameobject.js';
|
30
30
|
import { ContextEvent, ContextRegistry } from './engine_context_registry.js';
|
31
31
|
import { delay, getParam } from './engine_utils.js';
|
@@ -1,12 +1,12 @@
|
|
1
1
|
import { Context, ContextCreateArgs, LoadingProgressArgs } from "./engine_setup.js";
|
2
2
|
import { AROverlayHandler, arContainerClassName } from "./engine_element_overlay.js";
|
3
3
|
import { GameObject } from "../engine-components/Component.js";
|
4
|
-
import { calculateProgress01, EngineLoadingView, ILoadingViewHandler } from "./engine_element_loading.js";
|
4
|
+
import { calculateProgress01, EngineLoadingView, type ILoadingViewHandler } from "./engine_element_loading.js";
|
5
5
|
import { getParam } from "./engine_utils.js";
|
6
6
|
import { setDracoDecoderPath, setDracoDecoderType, setKtx2TranscoderPath } from "./engine_loaders.js";
|
7
7
|
import { getLoader, registerLoader } from "../engine/engine_gltf.js";
|
8
8
|
import { NeedleGltfLoader } from "./engine_scenetools.js";
|
9
|
-
import { INeedleEngineComponent, LoadedGLTF } from "./engine_types.js";
|
9
|
+
import { type INeedleEngineComponent, type LoadedGLTF } from "./engine_types.js";
|
10
10
|
import { isDevEnvironment, showBalloonWarning } from "./debug/index.js";
|
11
11
|
import { hasCommercialLicense } from "./engine_license.js";
|
12
12
|
import { VERSION } from "./engine_constants.js";
|
@@ -3,7 +3,7 @@
|
|
3
3
|
import { InstantiateIdProvider } from "./engine_networking_instantiate.js";
|
4
4
|
import { Context, registerComponent } from "./engine_setup.js";
|
5
5
|
import { logHierarchy, setWorldPosition, setWorldQuaternion } from "./engine_three_utils.js";
|
6
|
-
import { GuidsMap, IComponent as Component, IComponent, IGameObject as GameObject, UIDProvider, Constructor } from "./engine_types.js";
|
6
|
+
import { type GuidsMap, type IComponent as Component, type IComponent, type IGameObject as GameObject, type UIDProvider, type Constructor } from "./engine_types.js";
|
7
7
|
import { getParam, tryFindObject } from "./engine_utils.js";
|
8
8
|
import { apply } from "../engine-components/js-extensions/Object3D.js";
|
9
9
|
import { $isUsingInstancing, InstancingUtil } from "./engine_instancing.js";
|
@@ -1,7 +1,7 @@
|
|
1
|
-
import { BufferAttribute, Line, BoxGeometry, EdgesGeometry, Color, LineSegments, LineBasicMaterial, Object3D, Mesh, SphereGeometry, ColorRepresentation, Vector3, Box3, Quaternion, CylinderGeometry } from 'three';
|
1
|
+
import { BufferAttribute, Line, BoxGeometry, EdgesGeometry, Color, LineSegments, LineBasicMaterial, Object3D, Mesh, SphereGeometry, type ColorRepresentation, Vector3, Box3, Quaternion, CylinderGeometry } from 'three';
|
2
2
|
import { Context } from './engine_setup.js';
|
3
3
|
import { setWorldPositionXYZ } from './engine_three_utils.js';
|
4
|
-
import { Vec3, Vec4 } from './engine_types.js';
|
4
|
+
import type { Vec3, Vec4 } from './engine_types.js';
|
5
5
|
|
6
6
|
const _tmp = new Vector3();
|
7
7
|
const _tmp2 = new Vector3();
|
@@ -3,12 +3,11 @@
|
|
3
3
|
import { InstantiateIdProvider } from "./engine_networking_instantiate.js"
|
4
4
|
import { Context } from "./engine_setup.js";
|
5
5
|
import { deserializeObject, serializeObject } from "./engine_serialization.js";
|
6
|
-
import { assign, ImplementationInformation, ISerializable, SerializationContext } from "./engine_serialization_core.js";
|
6
|
+
import { assign, ImplementationInformation, type ISerializable, SerializationContext } from "./engine_serialization_core.js";
|
7
7
|
import { NEEDLE_components } from "./extensions/NEEDLE_components.js";
|
8
8
|
import { debugExtension } from "./engine_default_parameters.js";
|
9
9
|
import { editorGuidKeyName, builtinComponentKeyName } from "./engine_constants.js";
|
10
|
-
import { GuidsMap, ICamera, IComponent, IGameObject, SourceIdentifier } from "./engine_types.js";
|
11
|
-
import { UIDProvider } from "./engine_types.js";
|
10
|
+
import type { GuidsMap, ICamera, IComponent, IGameObject, SourceIdentifier, UIDProvider } from "./engine_types.js";
|
12
11
|
import { addNewComponent } from "./engine_components.js";
|
13
12
|
import { getParam } from "./engine_utils.js";
|
14
13
|
import { LogType, showBalloonMessage } from "./debug/index.js";
|
@@ -1,8 +1,8 @@
|
|
1
|
-
import {
|
1
|
+
import type { ConstructorConcrete, SourceIdentifier, UIDProvider } from "./engine_types.js";
|
2
2
|
import { Context } from "./engine_setup.js";
|
3
3
|
import { NEEDLE_components } from "./extensions/NEEDLE_components.js";
|
4
4
|
import { SerializationContext } from "./engine_serialization_core.js";
|
5
|
-
import { GLTF } from 'three/examples/jsm/loaders/GLTFLoader.js'
|
5
|
+
import type { GLTF } from 'three/examples/jsm/loaders/GLTFLoader.js'
|
6
6
|
|
7
7
|
|
8
8
|
export interface INeedleGltfLoader {
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { IComponent } from "./engine_types.js";
|
1
|
+
import type { IComponent } from "./engine_types.js";
|
2
2
|
import { TypeStore } from "./engine_typestore.js";
|
3
3
|
import { addScriptToArrays, removeScriptFromContext } from "./engine_mainloop_utils.js"
|
4
4
|
import { getParam } from "./engine_utils.js";
|
@@ -1,8 +1,7 @@
|
|
1
1
|
import { Vector2 } from 'three';
|
2
2
|
import { showBalloonMessage, showBalloonWarning } from './debug/debug.js';
|
3
|
-
import { assign } from './engine_serialization_core.js';
|
4
3
|
import { Context } from './engine_setup.js';
|
5
|
-
import { IInput, Vec2 } from './engine_types.js';
|
4
|
+
import type { IInput, Vec2 } from './engine_types.js';
|
6
5
|
import { getParam } from './engine_utils.js';
|
7
6
|
|
8
7
|
const debug = getParam("debuginput");
|
@@ -646,10 +645,13 @@
|
|
646
645
|
let dx = evt.clientX - lf.x;
|
647
646
|
let dy = evt.clientY - lf.y;
|
648
647
|
// if pointer is locked, clientX and Y are not changed, but Movement is.
|
649
|
-
if(
|
650
|
-
|
651
|
-
|
652
|
-
|
648
|
+
if(evt.source instanceof MouseEvent || evt.source instanceof TouchEvent) {
|
649
|
+
const source = evt.source as PointerEvent;
|
650
|
+
if(dx === 0 && source.movementX !== 0)
|
651
|
+
dx = source.movementX || 0;
|
652
|
+
if(dy === 0 && source.movementY !== 0)
|
653
|
+
dy = source.movementY || 0;
|
654
|
+
}
|
653
655
|
delta.x += dx;
|
654
656
|
delta.y += dy;
|
655
657
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { getParam, isMobileDevice } from "./engine_utils.js";
|
2
2
|
import { ContextEvent, ContextRegistry } from "./engine_context_registry.js";
|
3
|
-
import { IContext } from "./engine_types.js";
|
3
|
+
import type { IContext } from "./engine_types.js";
|
4
4
|
import { logoSVG } from "./assets/index.js";
|
5
5
|
import { GENERATOR, VERSION } from "./engine_constants.js";
|
6
6
|
|
@@ -2,7 +2,7 @@
|
|
2
2
|
import { Texture, ShaderChunk, UniformsLib, Vector4 } from "three";
|
3
3
|
import { Context } from "./engine_setup.js";
|
4
4
|
import { getParam } from "./engine_utils.js";
|
5
|
-
import { SourceIdentifier } from "./engine_types.js";
|
5
|
+
import type { SourceIdentifier } from "./engine_types.js";
|
6
6
|
|
7
7
|
const debugLightmap = getParam("debuglightmaps") ? true : false;
|
8
8
|
|
@@ -2,10 +2,10 @@
|
|
2
2
|
import * as constants from "./engine_constants.js";
|
3
3
|
import { getParam } from './engine_utils.js';
|
4
4
|
import { CubeCamera, Object3D, Scene, WebGLCubeRenderTarget } from 'three';
|
5
|
-
import { IComponent, IContext } from './engine_types.js';
|
5
|
+
import type { IComponent, IContext } from './engine_types.js';
|
6
6
|
import { isActiveSelf } from './engine_gameobject.js';
|
7
7
|
import { ContextRegistry } from "./engine_context_registry.js";
|
8
|
-
import {
|
8
|
+
import { isDevEnvironment } from "./debug/index.js";
|
9
9
|
|
10
10
|
const debug = getParam("debugnewscripts");
|
11
11
|
const debugHierarchy = getParam("debughierarchy");
|
@@ -1,8 +1,5 @@
|
|
1
|
-
import { Vector } from "three";
|
2
|
-
import { inverseLerp } from "three/src/math/MathUtils.js";
|
1
|
+
import type { Vector } from "three";
|
3
2
|
|
4
|
-
|
5
|
-
|
6
3
|
class MathHelper {
|
7
4
|
|
8
5
|
random(): number {
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { getParam } from "./engine_utils.js";
|
2
2
|
import { isDevEnvironment } from "./debug/index.js";
|
3
|
-
import { IComponent } from "./engine_types.js";
|
3
|
+
import type { IComponent } from "./engine_types.js";
|
4
4
|
|
5
5
|
const debug = getParam("debugautosync");
|
6
6
|
|
@@ -1,8 +1,8 @@
|
|
1
1
|
// import { SyncedTransform } from "../engine-components/SyncedTransform.js";
|
2
2
|
// import { DragControls } from "../engine-components/DragControls.js"
|
3
3
|
// import { ObjectRaycaster } from "../engine-components/ui/Raycaster.js";
|
4
|
-
import { UIDProvider } from "./engine_types.js";
|
5
|
-
import { GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
4
|
+
import type { UIDProvider } from "./engine_types.js";
|
5
|
+
import type { GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
6
6
|
// import { Animation } from "../engine-components/Animation.js";
|
7
7
|
|
8
8
|
|
@@ -3,10 +3,10 @@
|
|
3
3
|
import { NetworkConnection } from "../engine/engine_networking.js";
|
4
4
|
import { generateSeed, InstantiateIdProvider } from "../engine/engine_networking_instantiate.js";
|
5
5
|
import * as def from "./engine_networking_files_default_components.js"
|
6
|
-
import { GLTF } from 'three/examples/jsm/loaders/GLTFLoader.js'
|
6
|
+
import type { GLTF } from 'three/examples/jsm/loaders/GLTFLoader.js'
|
7
7
|
import { getLoader } from "../engine/engine_gltf.js";
|
8
|
-
import { IModel } from "./engine_networking_types.js";
|
9
|
-
import { IGameObject } from "./engine_types.js";
|
8
|
+
import type { IModel } from "./engine_networking_types.js";
|
9
|
+
import type { IGameObject } from "./engine_types.js";
|
10
10
|
import { findByGuid } from "./engine_gameobject.js";
|
11
11
|
import { ContextEvent, ContextRegistry } from "./engine_context_registry.js";
|
12
12
|
import { BoxGeometry, BoxHelper, Mesh, MeshBasicMaterial, Object3D, Vector3 } from "three";
|
@@ -2,14 +2,14 @@
|
|
2
2
|
import * as THREE from "three";
|
3
3
|
import { Context } from "./engine_setup.js"
|
4
4
|
import * as utils from "./engine_utils.js"
|
5
|
-
import { INetworkConnection } from "./engine_networking_types.js";
|
6
|
-
import { IGameObject as GameObject, IComponent as Component } from "./engine_types.js"
|
5
|
+
import type { INetworkConnection } from "./engine_networking_types.js";
|
6
|
+
import type { IGameObject as GameObject, IComponent as Component } from "./engine_types.js"
|
7
7
|
|
8
8
|
// https://github.com/uuidjs/uuid
|
9
9
|
// v5 takes string and namespace
|
10
10
|
import { v5 } from 'uuid';
|
11
|
-
import { UIDProvider } from "./engine_types.js";
|
12
|
-
import { IModel } from "./engine_networking_types.js";
|
11
|
+
import type { UIDProvider } from "./engine_types.js";
|
12
|
+
import type { IModel } from "./engine_networking_types.js";
|
13
13
|
import { SendQueue } from "./engine_networking_types.js";
|
14
14
|
import { destroy, findByGuid, instantiate } from "./engine_gameobject.js";
|
15
15
|
import { Object3D } from "three";
|
@@ -1,6 +1,6 @@
|
|
1
|
-
import Peer, { PeerConnectOption } from "peerjs";
|
2
|
-
import {
|
3
|
-
import { ConstructorConcrete } from "./engine_types.js";
|
1
|
+
import Peer, { type PeerConnectOption } from "peerjs";
|
2
|
+
import type { DataConnection, PeerJSOption } from "peerjs";
|
3
|
+
import { type ConstructorConcrete } from "./engine_types.js";
|
4
4
|
|
5
5
|
let peerOptions: PeerJSOption | undefined = undefined;
|
6
6
|
|
@@ -8,7 +8,7 @@
|
|
8
8
|
import * as flatbuffers from 'flatbuffers';
|
9
9
|
import * as schemes from "../engine-schemes/schemes.js";
|
10
10
|
import { PeerNetworking } from './engine_networking_peer.js';
|
11
|
-
import { IModel, INetworkConnection, SendQueue } from './engine_networking_types.js';
|
11
|
+
import { type IModel, type INetworkConnection, SendQueue } from './engine_networking_types.js';
|
12
12
|
import { isHostedOnGlitch } from './engine_networking_utils.js';
|
13
13
|
|
14
14
|
export const debugNet = utils.getParam("debugnet") ? true : false;
|
@@ -481,6 +481,7 @@
|
|
481
481
|
|
482
482
|
console.log("⊡ Connecting to networking backend on\n" + serverUrl)
|
483
483
|
const pkg = await import('websocket-ts');
|
484
|
+
// @ts-ignore
|
484
485
|
const WebsocketBuilder = pkg.default?.WebsocketBuilder ?? pkg.WebsocketBuilder;
|
485
486
|
const ws = new WebsocketBuilder(serverUrl)
|
486
487
|
.onOpen(() => {
|
@@ -580,11 +581,18 @@
|
|
580
581
|
this._currentRoomName = model.room;
|
581
582
|
this._currentRoomViewId = model.viewId;
|
582
583
|
this._currentRoomAllowEditing = model.allowEditing ?? true;
|
583
|
-
|
584
|
+
|
584
585
|
this._currentInRoom.length = 0;
|
585
586
|
this._currentInRoom.push(...model.inRoom);
|
586
587
|
if (debugNet)
|
587
588
|
console.log("joined room with", this._currentInRoom, this.context.alias ?? "");
|
589
|
+
|
590
|
+
const viewUrl = new URL(window.location.href);
|
591
|
+
if (viewUrl.searchParams.has("room")) {
|
592
|
+
viewUrl.searchParams.delete("room");
|
593
|
+
}
|
594
|
+
viewUrl.searchParams.set("view", this._currentRoomViewId);
|
595
|
+
console.log("Room view id: " + this._currentRoomViewId + "\n" + viewUrl.href);
|
588
596
|
}
|
589
597
|
|
590
598
|
this.onSendQueued(SendQueue.OnRoomJoin);
|
@@ -2,13 +2,11 @@
|
|
2
2
|
import * as BufferGeometryUtils from 'three/examples/jsm/utils/BufferGeometryUtils.js'
|
3
3
|
import { CircularBuffer, getParam } from "./engine_utils.js"
|
4
4
|
import { getWorldPosition, getWorldQuaternion, getWorldScale, setWorldPositionXYZ, setWorldQuaternionXYZW } from "./engine_three_utils.js"
|
5
|
-
import {
|
5
|
+
import type {
|
6
6
|
IPhysicsEngine,
|
7
7
|
IComponent,
|
8
8
|
ICollider,
|
9
9
|
IRigidbody,
|
10
|
-
Collision,
|
11
|
-
ContactPoint,
|
12
10
|
Vec3,
|
13
11
|
IGameObject,
|
14
12
|
Vec2,
|
@@ -16,6 +14,7 @@
|
|
16
14
|
ISphereCollider,
|
17
15
|
IBoxCollider,
|
18
16
|
} from './engine_types.js';
|
17
|
+
import { ContactPoint, Collision } from './engine_types.js';
|
19
18
|
import { foreachComponent } from './engine_gameobject.js';
|
20
19
|
|
21
20
|
import { ActiveCollisionTypes, ActiveEvents, CoefficientCombineRule, Ball, Collider, ColliderDesc, EventQueue, JointData, QueryFilterFlags, RigidBody, RigidBodyType, ShapeColliderTOI, World, Ray, ShapeType, Cuboid } from '@dimforge/rapier3d-compat';
|
@@ -593,7 +592,7 @@
|
|
593
592
|
positions = this._meshCache.get(key)!;
|
594
593
|
}
|
595
594
|
else {
|
596
|
-
console.warn(
|
595
|
+
console.warn(`Your MeshCollider \"${collider.name}\" is scaled\nthis is not optimal for performance since this isn't supported by the Rapier physics engine yet. Consider applying the scale to the collider mesh`);
|
597
596
|
// showBalloonWarning("Your model is using scaled mesh colliders which is not optimal for performance: " + mesh.name + ", consider using unscaled objects");
|
598
597
|
const scaledPositions = new Float32Array(positions.length);
|
599
598
|
for (let i = 0; i < positions.length; i += 3) {
|
@@ -758,6 +757,7 @@
|
|
758
757
|
|
759
758
|
private internalUpdateColliderProperties(col: ICollider, collider: Collider) {
|
760
759
|
const shape = collider.shape;
|
760
|
+
let sizeHasChanged = false;
|
761
761
|
switch (shape.type) {
|
762
762
|
// Sphere Collider
|
763
763
|
case ShapeType.Ball:
|
@@ -767,21 +767,36 @@
|
|
767
767
|
const obj = col.gameObject;
|
768
768
|
const scale = getWorldScale(obj, this._tempPosition);
|
769
769
|
const radius = Math.abs(sc.radius * scale.x);
|
770
|
-
|
770
|
+
sizeHasChanged = ball.radius !== radius;
|
771
771
|
ball.radius = radius;
|
772
|
-
if (
|
772
|
+
if (sizeHasChanged) {
|
773
773
|
collider.setShape(ball);
|
774
|
+
}
|
774
775
|
break;
|
775
776
|
}
|
776
777
|
case ShapeType.Cuboid:
|
777
778
|
const cuboid = shape as Cuboid;
|
778
779
|
const sc = col as IBoxCollider;
|
779
|
-
|
780
|
-
|
781
|
-
|
782
|
-
|
780
|
+
const newX = sc.size.x * 0.5;
|
781
|
+
const newY = sc.size.y * 0.5;
|
782
|
+
const newZ = sc.size.z * 0.5;
|
783
|
+
sizeHasChanged = cuboid.halfExtents.x !== newX || cuboid.halfExtents.y !== newY || cuboid.halfExtents.z !== newZ;
|
784
|
+
cuboid.halfExtents.x = newX;
|
785
|
+
cuboid.halfExtents.y = newY;
|
786
|
+
cuboid.halfExtents.z = newZ;
|
787
|
+
if (sizeHasChanged) {
|
788
|
+
collider.setShape(cuboid);
|
789
|
+
}
|
783
790
|
break;
|
784
791
|
}
|
792
|
+
|
793
|
+
if (sizeHasChanged) {
|
794
|
+
const rb = col.attachedRigidbody;
|
795
|
+
if (rb?.autoMass) {
|
796
|
+
const ph = this.getBody(rb) as RigidBody
|
797
|
+
ph?.recomputeMassPropertiesFromColliders();
|
798
|
+
}
|
799
|
+
}
|
785
800
|
}
|
786
801
|
|
787
802
|
private internalUpdateRigidbodyProperties(rb: IRigidbody, rigidbody: RigidBody) {
|
@@ -1,9 +1,9 @@
|
|
1
|
-
import { Box3, Camera, Intersection, Layers, Mesh, Object3D, Ray, Raycaster, Sphere, Vector2, Vector3 } from 'three'
|
1
|
+
import { Box3, Camera, type Intersection, Layers, Mesh, Object3D, Ray, Raycaster, Sphere, Vector2, Vector3 } from 'three'
|
2
2
|
import { Context } from './engine_setup.js';
|
3
3
|
import { getParam } from "./engine_utils.js"
|
4
4
|
import { getWorldPosition } from "./engine_three_utils.js"
|
5
|
-
import { Vec2, Vec3, } from './engine_types.js';
|
6
|
-
import { IPhysicsEngine } from './engine_types.js';
|
5
|
+
import type { Vec2, Vec3, } from './engine_types.js';
|
6
|
+
import type { IPhysicsEngine } from './engine_types.js';
|
7
7
|
|
8
8
|
const debugPhysics = getParam("debugphysics");
|
9
9
|
const layerMaskHelper: Layers = new Layers();
|
@@ -4,7 +4,7 @@
|
|
4
4
|
import { SceneLightSettings } from "./extensions/NEEDLE_lighting_settings.js";
|
5
5
|
import { createFlatTexture, createTrilightTexture } from "./engine_shaders.js";
|
6
6
|
import { getParam } from "./engine_utils.js";
|
7
|
-
import { SourceIdentifier } from "./engine_types.js";
|
7
|
+
import { type SourceIdentifier } from "./engine_types.js";
|
8
8
|
import { AssetReference } from "./engine_addressables.js";
|
9
9
|
|
10
10
|
const debug = getParam("debugenvlight");
|
@@ -1,11 +1,11 @@
|
|
1
1
|
import { Context } from "./engine_setup.js"
|
2
|
-
import { GLTF, GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader.js'
|
2
|
+
import { type GLTF, GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader.js'
|
3
3
|
// import * as object from "./engine_gltf_builtin_components.js";
|
4
4
|
import * as loaders from "./engine_loaders.js"
|
5
5
|
import * as utils from "./engine_utils.js";
|
6
6
|
import { registerComponentExtension, registerExtensions } from "./extensions/extensions.js";
|
7
|
-
import { getLoader, INeedleGltfLoader, registerLoader } from "./engine_gltf.js";
|
8
|
-
import { SourceIdentifier, UIDProvider } from "./engine_types.js";
|
7
|
+
import { getLoader, type INeedleGltfLoader, registerLoader } from "./engine_gltf.js";
|
8
|
+
import { type SourceIdentifier, type UIDProvider } from "./engine_types.js";
|
9
9
|
import { createBuiltinComponents, writeBuiltinComponentData } from "./engine_gltf_builtin_components.js";
|
10
10
|
import { SerializationContext } from "./engine_serialization_core.js";
|
11
11
|
import { NEEDLE_components } from "./extensions/NEEDLE_components.js";
|
@@ -8,6 +8,7 @@
|
|
8
8
|
import { RenderTexture } from "./engine_texture.js";
|
9
9
|
import { isDevEnvironment, showBalloonMessage, showBalloonWarning } from "../engine/debug/index.js";
|
10
10
|
import { resolveUrl } from "./engine_utils.js";
|
11
|
+
import { AssetReference } from "./engine_addressables.js";
|
11
12
|
|
12
13
|
// export class SourcePath {
|
13
14
|
// src?:string
|
@@ -75,9 +76,14 @@
|
|
75
76
|
|
76
77
|
if (typeof data === "string") {
|
77
78
|
if (data.endsWith(".glb") || data.endsWith(".gltf")) {
|
79
|
+
// If the @serializable([Object3D, AssetReference]) looks like this we don't need to warn here. This is the case e.g. with SyncedCamera referencing a scene
|
80
|
+
if (context.serializable instanceof Array) {
|
81
|
+
if (context.serializable.includes(AssetReference)) return undefined;
|
82
|
+
}
|
78
83
|
if (isDevEnvironment())
|
79
84
|
showBalloonWarning("Detected wrong usage of @serializable with Object3D or GameObject. Instead you should use AssetReference here! Please see the console for details.");
|
80
|
-
|
85
|
+
const scriptname = context.target?.constructor?.name;
|
86
|
+
console.warn(`Wrong usage of @serializable detected in your script \"${scriptname}\"\n\nIt looks like you used @serializable(Object3D) or @serializable(GameObject) for a prefab or scene reference which is exported to a separate glTF file.\n\nTo fix this please change your code to:\n\n@serializable(AssetReference)\n${context.path}! : AssetReference;\n\0`);
|
81
87
|
}
|
82
88
|
// ACTUALLY: this is already handled by the extension_utils where we resolve json pointers recursively
|
83
89
|
// if(data.startsWith("/nodes/")){
|
@@ -1,9 +1,9 @@
|
|
1
|
-
import { GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
1
|
+
import { type GLTF } from "three/examples/jsm/loaders/GLTFLoader.js";
|
2
2
|
import { getParam } from "./engine_utils.js";
|
3
3
|
import { AnimationClip, Material, Mesh, Object3D, Texture } from "three";
|
4
4
|
import { Context } from "./engine_setup.js";
|
5
5
|
import { isPersistentAsset } from "./extensions/NEEDLE_persistent_assets.js";
|
6
|
-
import { ConstructorConcrete, SourceIdentifier } from "./engine_types.js";
|
6
|
+
import { type ConstructorConcrete, type SourceIdentifier } from "./engine_types.js";
|
7
7
|
import { debugExtension } from "../engine/engine_default_parameters.js";
|
8
8
|
import { LogType, addLog } from "./debug/debug_overlay.js";
|
9
9
|
import { isLocalNetwork } from "./engine_networking_utils.js";
|
@@ -182,6 +182,8 @@
|
|
182
182
|
context?: Context;
|
183
183
|
path?: string;
|
184
184
|
type?: ConstructorConcrete<any>;
|
185
|
+
/** the serializable attribute for this field (target.path) */
|
186
|
+
serializable?: any;
|
185
187
|
/** holds information if a field was undefined before serialization. This gives us info if we might want to warn the user about missing attributes */
|
186
188
|
implementationInformation?: ImplementationInformation;
|
187
189
|
|
@@ -289,6 +291,7 @@
|
|
289
291
|
|
290
292
|
context.type = undefined;
|
291
293
|
context.path = key;
|
294
|
+
context.serializable = serializedEntryInfo
|
292
295
|
|
293
296
|
if (obj.onBeforeDeserializeMember !== undefined) {
|
294
297
|
// callback to the instance, if it returns true assume it's done all the things itself
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { Constructor } from "./engine_types.js";
|
1
|
+
import { type Constructor } from "./engine_types.js";
|
2
2
|
|
3
3
|
export declare type TypeResolver<T> = (data) => Constructor<T> | null;
|
4
4
|
|
@@ -1,7 +1,7 @@
|
|
1
1
|
|
2
2
|
import * as loader from "./engine_fileloader.js"
|
3
3
|
import * as SHADERDATA from "./shaders/shaderData.js"
|
4
|
-
import { Vector4, FileLoader, DataTexture, RGBAFormat, Color
|
4
|
+
import { Vector4, FileLoader, DataTexture, RGBAFormat, Color } from "three";
|
5
5
|
import { RGBAColor } from "../engine-components/js-extensions/RGBAColor.js";
|
6
6
|
import { Mathf } from "./engine_math.js";
|
7
7
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { Clock } from 'three'
|
2
2
|
import { getParam } from './engine_utils.js';
|
3
|
-
import { ITime } from './engine_types.js';
|
3
|
+
import { type ITime } from './engine_types.js';
|
4
4
|
|
5
5
|
const timescaleUrl = getParam("timescale");
|
6
6
|
let timeScale = 1;
|
@@ -1,9 +1,10 @@
|
|
1
1
|
import { RenderTexture } from "./engine_texture.js";
|
2
|
-
import { Camera, Color, Material, Object3D,
|
2
|
+
import type { Camera, Color, Material, Object3D, Quaternion, Ray, Scene, WebGLRenderer, Mesh } from "three";
|
3
|
+
import { Vector3 } from "three";
|
3
4
|
import { RGBAColor } from "../engine-components/js-extensions/RGBAColor.js";
|
4
|
-
import { CollisionDetectionMode, PhysicsMaterial, RigidbodyConstraints } from "./engine_physics.types.js";
|
5
|
+
import { CollisionDetectionMode, type PhysicsMaterial, RigidbodyConstraints } from "./engine_physics.types.js";
|
5
6
|
import { CircularBuffer } from "./engine_utils.js";
|
6
|
-
import { GLTF as GLTF3 } from "three/examples/jsm/loaders/GLTFLoader.js";
|
7
|
+
import { type GLTF as GLTF3 } from "three/examples/jsm/loaders/GLTFLoader.js";
|
7
8
|
|
8
9
|
export type GLTF = GLTF3 & {
|
9
10
|
// asset: { generator: string, version: string }
|
@@ -1,8 +1,8 @@
|
|
1
1
|
import { $isAssigningProperties } from "./engine_serialization_core.js";
|
2
2
|
import { LogType, isDevEnvironment, showBalloonMessage } from "./debug/index.js";
|
3
|
-
import { Constructor, IComponent } from "./engine_types.js";
|
3
|
+
import { type Constructor, type IComponent } from "./engine_types.js";
|
4
4
|
import { Quaternion, Vector2, Vector3, Vector4 } from "three";
|
5
|
-
import {
|
5
|
+
import { watchWrite } from "./engine_utils.js";
|
6
6
|
|
7
7
|
|
8
8
|
declare type setter = (v: any) => void;
|
@@ -1,6 +1,6 @@
|
|
1
1
|
// use for typesafe interface method calls
|
2
|
-
import { Quaternion, Vector, Vector2, Vector3, Vector4 } from "three";
|
3
|
-
import { SourceIdentifier } from "./engine_types.js";
|
2
|
+
import { Quaternion, type Vector, Vector2, Vector3, Vector4 } from "three";
|
3
|
+
import { type SourceIdentifier } from "./engine_types.js";
|
4
4
|
|
5
5
|
// https://schneidenbach.gitbooks.io/typescript-cookbook/content/nameof-operator.html
|
6
6
|
export const nameofFactory = <T>() => (name: keyof T) => name;
|
@@ -458,6 +458,20 @@
|
|
458
458
|
return navigator.userAgent.includes("OculusBrowser");
|
459
459
|
}
|
460
460
|
|
461
|
+
export async function microphonePermissionsGranted() {
|
462
|
+
try {
|
463
|
+
//@ts-ignore
|
464
|
+
const res = await navigator.permissions.query({ name: 'microphone' });
|
465
|
+
if (res.state === "denied") {
|
466
|
+
return false;
|
467
|
+
}
|
468
|
+
return true;
|
469
|
+
}
|
470
|
+
catch (err) {
|
471
|
+
console.error("Error querying `microphone` permissions.", err);
|
472
|
+
return false;
|
473
|
+
}
|
474
|
+
}
|
461
475
|
|
462
476
|
|
463
477
|
const cloudflareIPRegex = /ip=(?<ip>.+?)\n/s;
|
@@ -4,16 +4,15 @@
|
|
4
4
|
import { ControllerEvents, WebXRController } from "../webxr/WebXRController.js";
|
5
5
|
import * as ThreeMeshUI from 'three-mesh-ui'
|
6
6
|
import { Context } from "../../engine/engine_setup.js";
|
7
|
-
import {
|
8
|
-
import { IPointerEventHandler, PointerEventData } from "./PointerEvents.js";
|
7
|
+
import { type IPointerEventHandler, PointerEventData } from "./PointerEvents.js";
|
9
8
|
import { ObjectRaycaster, Raycaster } from "./Raycaster.js";
|
10
9
|
import { InputEvents, NEPointerEvent, PointerType } from "../../engine/engine_input.js";
|
11
10
|
import { Object3D } from "three";
|
12
|
-
import { ICanvasGroup
|
11
|
+
import type { ICanvasGroup } from "./Interfaces.js";
|
13
12
|
import { getParam } from "../../engine/engine_utils.js";
|
14
13
|
import { UIRaycastUtils } from "./RaycastUtils.js";
|
15
14
|
import { $shadowDomOwner } from "./BaseUIComponent.js";
|
16
|
-
import { isDevEnvironment, showBalloonMessage
|
15
|
+
import { isDevEnvironment, showBalloonMessage } from "../../engine/debug/index.js";
|
17
16
|
import { Mathf } from "../../engine/engine_math.js";
|
18
17
|
|
19
18
|
const debug = getParam("debugeventsystem");
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { serializable } from "../engine/engine_serialization.js";
|
2
2
|
import { EventList } from "./EventList.js";
|
3
|
-
import { IPointerEventHandler, PointerEventData } from "./ui/PointerEvents.js"
|
3
|
+
import type { IPointerEventHandler, PointerEventData } from "./ui/PointerEvents.js"
|
4
4
|
import { Behaviour } from "./Component.js"
|
5
5
|
import { EventType } from "./EventType.js"
|
6
6
|
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { getParam } from "../engine_utils.js";
|
2
2
|
import { EXRLoader } from "three/examples/jsm/loaders/EXRLoader.js";
|
3
3
|
import { Texture } from "three";
|
4
|
-
import { GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
4
|
+
import { type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
5
5
|
|
6
6
|
|
7
7
|
const debug = getParam("debugexr");
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { IExtensionReferenceResolver } from "./extension_resolver.js";
|
1
|
+
import { type IExtensionReferenceResolver } from "./extension_resolver.js";
|
2
2
|
import { GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
3
|
import { debugExtension } from "../engine_default_parameters.js";
|
4
4
|
import { getParam } from "../engine_utils.js";
|
@@ -8,14 +8,14 @@
|
|
8
8
|
import { NEEDLE_persistent_assets } from "./NEEDLE_persistent_assets.js";
|
9
9
|
// import { KHR_animation_pointer } from "./KHR_animation_pointer.js";
|
10
10
|
import { NEEDLE_lightmaps } from "../extensions/NEEDLE_lightmaps.js";
|
11
|
-
import {
|
11
|
+
import { type ConstructorConcrete, type SourceIdentifier } from "../engine_types.js";
|
12
12
|
import { Context } from "../engine_setup.js";
|
13
13
|
import { NEEDLE_lighting_settings } from "./NEEDLE_lighting_settings.js";
|
14
14
|
import { NEEDLE_render_objects } from "./NEEDLE_render_objects.js";
|
15
15
|
import { NEEDLE_progressive } from "./NEEDLE_progressive.js";
|
16
16
|
import { InternalUsageTrackerPlugin } from "./usage_tracker.js";
|
17
17
|
import { isResourceTrackingEnabled } from "../engine_assetdatabase.js";
|
18
|
-
import { GLTFLoaderPlugin } from "three/examples/jsm/loaders/GLTFLoader.js";
|
18
|
+
import { type GLTFLoaderPlugin } from "three/examples/jsm/loaders/GLTFLoader.js";
|
19
19
|
import { getParam } from "../engine_utils.js";
|
20
20
|
import { isDevEnvironment } from "../debug/index.js";
|
21
21
|
// import { GLTFAnimationPointerExtension } from "three/examples/jsm/loaders/GLTFLoaderAnimationPointer.js";
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { Object3D } from "three";
|
2
|
-
import { Constructor } from "../../engine/engine_types.js";
|
2
|
+
import type { Constructor } from "../../engine/engine_types.js";
|
3
3
|
|
4
4
|
const handlers: Map<any, ApplyPrototypeExtension> = new Map();
|
5
5
|
|
@@ -131,7 +131,7 @@
|
|
131
131
|
this.ext!.context = serializationContext;
|
132
132
|
|
133
133
|
return new Promise((resolve, reject) => {
|
134
|
-
|
134
|
+
if (debugExport) console.log("Starting glTF export.")
|
135
135
|
try {
|
136
136
|
// Parse the input and generate the glTF output
|
137
137
|
this.exporter?.parse(
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { IGraphic, IRectTransformChangedReceiver } from './Interfaces.js';
|
1
|
+
import { type IGraphic, type IRectTransformChangedReceiver } from './Interfaces.js';
|
2
2
|
import * as ThreeMeshUI from 'three-mesh-ui'
|
3
3
|
import { RGBAColor } from "../js-extensions/RGBAColor.js"
|
4
4
|
import { BaseUIComponent } from "./BaseUIComponent.js";
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { Behaviour, GameObject } from "../Component.js";
|
2
|
-
import {
|
2
|
+
import { type IPointerEventHandler } from "./PointerEvents.js";
|
3
3
|
import { FrameEvent } from "../../engine/engine_setup.js";
|
4
4
|
import { serializable } from "../../engine/engine_serialization_decorator.js";
|
5
5
|
import { Text } from "./Text.js";
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { Behaviour } from "./Component.js";
|
2
|
-
import { IPointerClickHandler, PointerEventData } from "./ui/PointerEvents.js";
|
2
|
+
import type { IPointerClickHandler, PointerEventData } from "./ui/PointerEvents.js";
|
3
3
|
|
4
4
|
|
5
5
|
export class Interactable extends Behaviour implements IPointerClickHandler {
|
@@ -1,5 +1,4 @@
|
|
1
|
-
import {
|
2
|
-
import { IComponent } from "../../engine/engine_types.js";
|
1
|
+
import { type IComponent } from "../../engine/engine_types.js";
|
3
2
|
|
4
3
|
export interface ICanvas extends IComponent {
|
5
4
|
get isCanvas(): boolean;
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { ILayoutGroup, IRectTransform
|
1
|
+
import { type ILayoutGroup, type IRectTransform } from "./Interfaces.js";
|
2
2
|
import { Behaviour, GameObject } from "../Component.js";
|
3
3
|
import { serializable } from "../../engine/engine_serialization.js";
|
4
4
|
import { Canvas } from "./Canvas.js";
|
@@ -7,9 +7,7 @@
|
|
7
7
|
import { Color, DirectionalLight, OrthographicCamera } from "three";
|
8
8
|
import { WebXR, WebXREvent } from "./webxr/WebXR.js";
|
9
9
|
import { WebARSessionRoot } from "./webxr/WebARSessionRoot.js";
|
10
|
-
import { ILight } from "../engine/engine_types.js";
|
11
|
-
import { Mathf } from "../engine/engine_math.js";
|
12
|
-
import { isLocalNetwork } from "../engine/engine_networking_utils.js";
|
10
|
+
import type { ILight } from "../engine/engine_types.js";
|
13
11
|
|
14
12
|
// https://threejs.org/examples/webgl_shadowmap_csm.html
|
15
13
|
|
@@ -4,7 +4,7 @@
|
|
4
4
|
import { getWorldPosition, getWorldQuaternion, setWorldQuaternion } from "../../engine/engine_three_utils.js";
|
5
5
|
|
6
6
|
import { USDObject } from "../../engine-components/export/usdz/ThreeUSDZExporter.js";
|
7
|
-
import { UsdzBehaviour } from "../../engine-components/export/usdz/extensions/behavior/Behaviour.js";
|
7
|
+
import { type UsdzBehaviour } from "../../engine-components/export/usdz/extensions/behavior/Behaviour.js";
|
8
8
|
import { ActionBuilder, BehaviorModel, TriggerBuilder, USDVec3 } from "../../engine-components/export/usdz/extensions/behavior/BehavioursBuilder.js";
|
9
9
|
|
10
10
|
export class LookAt extends Behaviour implements UsdzBehaviour {
|
@@ -1,5 +1,5 @@
|
|
1
|
-
import { GLTF, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
2
|
-
import { NodeToObjectMap, ObjectToNodeMap, SerializationContext } from "../engine_serialization_core.js";
|
1
|
+
import { type GLTF, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
2
|
+
import { type NodeToObjectMap, type ObjectToNodeMap, SerializationContext } from "../engine_serialization_core.js";
|
3
3
|
import { GLTFExporter } from 'three/examples/jsm/exporters/GLTFExporter.js';
|
4
4
|
import { debugExtension } from "../engine_default_parameters.js";
|
5
5
|
import { builtinComponentKeyName } from "../engine_constants.js";
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { Object3D } from "three";
|
2
|
-
import { GLTF, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
2
|
+
import { type GLTF, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
3
|
|
4
4
|
export const EXTENSION_NAME = "NEEDLE_gameobject_data";
|
5
5
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { AmbientLight, Color, HemisphereLight, Object3D } from "three";
|
2
|
-
import { GLTF, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
-
import { SourceIdentifier } from "../engine_types.js";
|
2
|
+
import { type GLTF, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
+
import { type SourceIdentifier } from "../engine_types.js";
|
4
4
|
import { Behaviour, GameObject } from "../../engine-components/Component.js";
|
5
5
|
import { AmbientMode, DefaultReflectionMode } from "../engine_scenelighting.js";
|
6
6
|
import { LightmapType } from "./NEEDLE_lightmaps.js";
|
@@ -1,7 +1,7 @@
|
|
1
|
-
import { ILightDataRegistry } from "../engine_lightdata.js";
|
2
|
-
import {
|
3
|
-
import { GLTF, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
4
|
-
import { SourceIdentifier } from "../engine_types.js";
|
1
|
+
import { type ILightDataRegistry } from "../engine_lightdata.js";
|
2
|
+
import { LinearSRGBColorSpace, SRGBColorSpace, Texture, TextureLoader } from "three";
|
3
|
+
import { type GLTF, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
4
|
+
import { type SourceIdentifier } from "../engine_types.js";
|
5
5
|
import { resolveReferences } from "./extension_utils.js";
|
6
6
|
import { getParam, PromiseAllWithErrors, resolveUrl } from "../engine_utils.js";
|
7
7
|
import { EXRLoader } from "three/examples/jsm/loaders/EXRLoader.js";
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { resolveReferences } from "./extension_utils.js";
|
2
|
-
import { GLTF, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
-
import { IExtensionReferenceResolver } from "./extension_resolver.js";
|
2
|
+
import { type GLTF, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
+
import { type IExtensionReferenceResolver } from "./extension_resolver.js";
|
4
4
|
import { debugExtension } from "../engine_default_parameters.js";
|
5
5
|
import { TypeStore } from "../engine_typestore.js";
|
6
6
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { Material, RawShaderMaterial, Texture, TextureLoader } from "three";
|
2
|
-
import { GLTF, GLTFLoader, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
-
import { SourceIdentifier } from "../engine_types.js";
|
2
|
+
import { type GLTF, GLTFLoader, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
+
import { type SourceIdentifier } from "../engine_types.js";
|
4
4
|
import { Context } from "../engine_setup.js";
|
5
5
|
import { addDracoAndKTX2Loaders } from "../engine_loaders.js";
|
6
6
|
import { PromiseAllWithErrors, PromiseErrorResult, delay, getParam, resolveUrl } from "../engine_utils.js";
|
@@ -1,7 +1,7 @@
|
|
1
1
|
|
2
|
-
import { SourceIdentifier } from "../engine_types.js";
|
3
|
-
import { GLTF, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
4
|
-
import { IComponent as Component, IRenderer } from "../engine_types.js";
|
2
|
+
import { type SourceIdentifier } from "../engine_types.js";
|
3
|
+
import { type GLTF, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
4
|
+
import { type IComponent as Component, type IRenderer } from "../engine_types.js";
|
5
5
|
|
6
6
|
import {
|
7
7
|
// stencil funcs
|
@@ -22,8 +22,8 @@
|
|
22
22
|
IncrementWrapStencilOp,
|
23
23
|
DecrementWrapStencilOp,
|
24
24
|
InvertStencilOp,
|
25
|
-
StencilFunc,
|
26
|
-
StencilOp as ThreeStencilOp,
|
25
|
+
type StencilFunc,
|
26
|
+
type StencilOp as ThreeStencilOp,
|
27
27
|
} from "three";
|
28
28
|
import { getParam } from "../engine_utils.js";
|
29
29
|
import { showBalloonWarning } from "../debug/index.js";
|
@@ -1,11 +1,11 @@
|
|
1
|
-
import { GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
1
|
+
import { type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
2
2
|
import { FindShaderTechniques, whiteDefaultTexture, ToUnityMatrixArray, SetUnitySphericalHarmonics } from '../engine_shaders.js';
|
3
|
-
import { AlwaysDepth, BackSide, Camera, DoubleSide, EqualDepth, FrontSide, GLSL3, GreaterDepth, GreaterEqualDepth, IUniform, LessDepth, LessEqualDepth,
|
3
|
+
import { AlwaysDepth, BackSide, Camera, DoubleSide, EqualDepth, FrontSide, GLSL3, GreaterDepth, GreaterEqualDepth, type IUniform, LessDepth, LessEqualDepth, LinearSRGBColorSpace, Material, Matrix4, NotEqualDepth, Object3D, RawShaderMaterial, Texture, Vector3, Vector4 } from 'three';
|
4
4
|
import { Context } from '../engine_setup.js';
|
5
5
|
import { getParam } from "../engine_utils.js";
|
6
6
|
import * as SHADERDATA from "../shaders/shaderData.js"
|
7
|
-
import { SourceIdentifier } from "../engine_types.js";
|
8
|
-
import { ILight } from "../engine_types.js";
|
7
|
+
import { type SourceIdentifier } from "../engine_types.js";
|
8
|
+
import { type ILight } from "../engine_types.js";
|
9
9
|
import { getWorldPosition } from "../engine_three_utils.js";
|
10
10
|
|
11
11
|
const debug = getParam("debugcustomshader");
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { getParam } from "../engine/engine_utils.js";
|
2
2
|
import { Behaviour } from "../engine-components/Component.js";
|
3
|
-
import { AssetReference, ProgressCallback } from "../engine/engine_addressables.js";
|
3
|
+
import { AssetReference, type ProgressCallback } from "../engine/engine_addressables.js";
|
4
4
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
5
5
|
import { InstantiateIdProvider } from "../engine/engine_networking_instantiate.js";
|
6
6
|
import { InstantiateOptions } from "../engine/engine_gameobject.js";
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { serializable } from "../engine/engine_serialization.js";
|
2
|
-
import { INetworkingWebsocketUrlProvider } from "../engine/engine_networking.js";
|
2
|
+
import type { INetworkingWebsocketUrlProvider } from "../engine/engine_networking.js";
|
3
3
|
import { isLocalNetwork } from "../engine/engine_networking_utils.js";
|
4
4
|
import { getParam } from "../engine/engine_utils.js";
|
5
5
|
import { Behaviour } from "./Component.js";
|
@@ -1,7 +1,6 @@
|
|
1
1
|
import { applyPrototypeExtensions, registerPrototypeExtensions } from "./ExtensionUtils.js";
|
2
2
|
import { Object3D } from "three";
|
3
|
-
import { Constructor, ConstructorConcrete, IComponent } from "../../engine/engine_types.js"
|
4
|
-
import { IComponent as Component } from "../../engine/engine_types.js";
|
3
|
+
import type { Constructor, ConstructorConcrete, IComponent, IComponent as Component } from "../../engine/engine_types.js";
|
5
4
|
import { moveComponentInstance, addNewComponent, getComponent, getComponentInChildren, getComponentInParent, getComponents, getComponentsInChildren, getComponentsInParent, getOrAddComponent, removeComponent } from "../../engine/engine_components.js";
|
6
5
|
import { isActiveSelf, setActive, destroy } from "../../engine/engine_gameobject.js";
|
7
6
|
|
@@ -1,5 +1,5 @@
|
|
1
1
|
|
2
|
-
import { IPointerClickHandler, PointerEventData } from "../ui/index.js";
|
2
|
+
import { type IPointerClickHandler, PointerEventData } from "../ui/index.js";
|
3
3
|
import { Behaviour } from "../Component.js";
|
4
4
|
import { serializable } from "../../engine/engine_serialization.js";
|
5
5
|
import { isDevEnvironment, showBalloonMessage } from "../../engine/debug/index.js";
|
@@ -8,8 +8,8 @@
|
|
8
8
|
|
9
9
|
import { Camera as ThreeCamera, Box3, Object3D, PerspectiveCamera, Vector2, Vector3, Box3Helper, GridHelper, Mesh, ShadowMaterial, RGBA_ASTC_10x10_Format } from "three";
|
10
10
|
import { OrbitControls as ThreeOrbitControls } from "three/examples/jsm/controls/OrbitControls.js";
|
11
|
-
import { AfterHandleInputEvent, EventSystem, EventSystemEvents } from "./ui/EventSystem.js";
|
12
|
-
import { ICameraController } from "../engine/engine_types.js";
|
11
|
+
import { type AfterHandleInputEvent, EventSystem, EventSystemEvents } from "./ui/EventSystem.js";
|
12
|
+
import type { ICameraController } from "../engine/engine_types.js";
|
13
13
|
import { setCameraController } from "../engine/engine_camera.js";
|
14
14
|
import { SyncedTransform } from "./SyncedTransform.js";
|
15
15
|
import { tryGetUIComponent } from "./ui/Utils.js";
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { Behaviour, GameObject } from "./Component.js";
|
2
2
|
import * as THREE from "three";
|
3
|
-
import { MainModule, EmissionModule, ShapeModule, ParticleSystemShapeType, MinMaxCurve, MinMaxGradient, ColorOverLifetimeModule, SizeOverLifetimeModule, NoiseModule, ParticleSystemSimulationSpace, ParticleBurst, IParticleSystem, ParticleSystemRenderMode, TrailModule, VelocityOverLifetimeModule, TextureSheetAnimationModule, RotationOverLifetimeModule, LimitVelocityOverLifetimeModule, RotationBySpeedModule, InheritVelocityModule, SizeBySpeedModule, ColorBySpeedModule, ParticleSystemScalingMode } from "./ParticleSystemModules.js"
|
3
|
+
import { MainModule, EmissionModule, ShapeModule, ParticleSystemShapeType, MinMaxCurve, MinMaxGradient, ColorOverLifetimeModule, SizeOverLifetimeModule, NoiseModule, ParticleSystemSimulationSpace, ParticleBurst, type IParticleSystem, ParticleSystemRenderMode, TrailModule, VelocityOverLifetimeModule, TextureSheetAnimationModule, RotationOverLifetimeModule, LimitVelocityOverLifetimeModule, RotationBySpeedModule, InheritVelocityModule, SizeBySpeedModule, ColorBySpeedModule, ParticleSystemScalingMode } from "./ParticleSystemModules.js"
|
4
4
|
import { getParam } from "../engine/engine_utils.js";
|
5
5
|
|
6
6
|
// https://github.dev/creativelifeform/three-nebula
|
@@ -11,7 +11,8 @@
|
|
11
11
|
import { AxesHelper, BufferGeometry, Color, Material, Matrix4, Mesh, MeshStandardMaterial, Object3D, OneMinusDstAlphaFactor, PlaneGeometry, Quaternion, Sprite, SpriteMaterial, Vector3, Vector4 } from "three";
|
12
12
|
import { getWorldPosition, getWorldQuaternion, getWorldScale, setWorldScale } from "../engine/engine_three_utils.js";
|
13
13
|
import { assign } from "../engine/engine_serialization_core.js";
|
14
|
-
import {
|
14
|
+
import { ParticleSystem as _ParticleSystem, ConstantValue, ConstantColor, BatchedParticleRenderer, TrailBatch, TrailParticle, RenderMode } from "three.quarks";
|
15
|
+
import type { BatchedRenderer, Behavior, BillBoardSettings, BurstParameters, ColorGenerator, EmissionState, EmitSubParticleSystem, EmitterShape, FunctionColorGenerator, FunctionJSON, FunctionValueGenerator, IntervalValue, MeshSettings, Particle, ParticleEmitter, ParticleSystemParameters, PointEmitter, RecordState, RotationGenerator, SizeOverLife, TrailSettings, ValueGenerator } from "three.quarks";
|
15
16
|
import { createFlatTexture } from "../engine/engine_shaders.js";
|
16
17
|
import { Mathf } from "../engine/engine_math.js";
|
17
18
|
import { Context } from "../engine/engine_setup.js";
|
@@ -1,12 +1,12 @@
|
|
1
|
-
import {
|
1
|
+
import { Matrix4, Object3D, Quaternion, Vector3, Vector2, Euler, Vector4 } from "three";
|
2
2
|
import { Mathf } from "../engine/engine_math.js";
|
3
3
|
import { serializable } from "../engine/engine_serialization.js";
|
4
4
|
import { RGBAColor } from "./js-extensions/RGBAColor.js";
|
5
5
|
import { AnimationCurve } from "./AnimationCurve.js";
|
6
|
-
import { Vec2, Vec3 } from "../engine/engine_types.js";
|
6
|
+
import type { Vec2, Vec3 } from "../engine/engine_types.js";
|
7
7
|
import { Context } from "../engine/engine_setup.js";
|
8
|
-
import { EmitterShape,
|
9
|
-
import { createNoise4D, NoiseFunction4D } from 'simplex-noise';
|
8
|
+
import type { EmitterShape, Particle, ShapeJSON } from "three.quarks";
|
9
|
+
import { createNoise4D, type NoiseFunction4D } from 'simplex-noise';
|
10
10
|
import { Gizmos } from "../engine/engine_gizmos.js";
|
11
11
|
import { getParam } from "../engine/engine_utils.js";
|
12
12
|
|
@@ -1,6 +1,6 @@
|
|
1
|
-
import { Behavior, Particle, EmissionState, ParticleSystem
|
1
|
+
import { type Behavior, type Particle, type EmissionState, type ParticleSystem } from "three.quarks";
|
2
2
|
import { Vector3, Quaternion, Matrix4 } from "three";
|
3
|
-
import { IParticleSystem } from "./ParticleSystemModules.js";
|
3
|
+
import type { IParticleSystem } from "./ParticleSystemModules.js";
|
4
4
|
import { CircularBuffer } from "../engine/engine_utils.js";
|
5
5
|
import { $particleLife, SubEmitterType } from "./ParticleSystem.js";
|
6
6
|
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
2
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
2
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
3
3
|
import { PixelationEffect as PixelationEffectPP } from "postprocessing";
|
4
4
|
import { VolumeParameter } from "../VolumeParameter.js";
|
5
5
|
import { serializable } from "../../../engine/engine_serialization.js";
|
@@ -7,7 +7,7 @@
|
|
7
7
|
import * as Models from "./TimelineModels.js";
|
8
8
|
import * as Tracks from "./TimelineTracks.js";
|
9
9
|
import { deepClone, delay, getParam } from '../../engine/engine_utils.js';
|
10
|
-
import { GuidsMap } from '../../engine/engine_types.js';
|
10
|
+
import type { GuidsMap } from '../../engine/engine_types.js';
|
11
11
|
import { Object3D, Quaternion, Vector3 } from 'three';
|
12
12
|
import { isLocalNetwork } from '../../engine/engine_networking_utils.js';
|
13
13
|
import { FrameEvent } from '../../engine/engine_context.js';
|
@@ -2,8 +2,8 @@
|
|
2
2
|
import { Effect, Pass } from "postprocessing";
|
3
3
|
import { VolumeParameter } from "./VolumeParameter.js";
|
4
4
|
import { Component } from "../Component.js";
|
5
|
-
import { ISerializable, SerializationContext } from "../../engine/engine_serialization_core.js";
|
6
|
-
import { EditorModification, IEditorModification } from "../../engine/engine_editor-sync.js";
|
5
|
+
import type { ISerializable, SerializationContext } from "../../engine/engine_serialization_core.js";
|
6
|
+
import type { EditorModification, IEditorModification } from "../../engine/engine_editor-sync.js";
|
7
7
|
import { getParam } from "../../engine/engine_utils.js";
|
8
8
|
|
9
9
|
const debug = getParam("debugpost");
|
@@ -5,7 +5,7 @@
|
|
5
5
|
import { showBalloonWarning } from "../../engine/debug/index.js";
|
6
6
|
import { Camera } from "../Camera.js";
|
7
7
|
import { PostProcessingEffect } from "./PostProcessingEffect.js";
|
8
|
-
import { Constructor } from "../../engine/engine_types.js";
|
8
|
+
import type { Constructor } from "../../engine/engine_types.js";
|
9
9
|
import { N8AOPostPass } from "n8ao";
|
10
10
|
|
11
11
|
const debug = getParam("debugpost");
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { Behaviour } from "../engine-components/Component.js";
|
2
|
-
import { KeyCode } from "../engine/engine_input.js";
|
2
|
+
import type { KeyCode } from "../engine/engine_input.js";
|
3
3
|
|
4
4
|
export class PresentationMode extends Behaviour {
|
5
5
|
|
@@ -1,8 +1,7 @@
|
|
1
1
|
import { foreachComponent } from "../../engine/engine_gameobject.js";
|
2
|
-
import {
|
3
|
-
import { IComponent } from "../../engine/engine_types.js";
|
2
|
+
import { type IComponent } from "../../engine/engine_types.js";
|
4
3
|
import { $shadowDomOwner } from "./BaseUIComponent.js";
|
5
|
-
import { ICanvasGroup, IGraphic } from "./Interfaces.js";
|
4
|
+
import { type ICanvasGroup, type IGraphic } from "./Interfaces.js";
|
6
5
|
import { Object3D } from "three";
|
7
6
|
|
8
7
|
|
@@ -1,12 +1,12 @@
|
|
1
1
|
import * as ThreeMeshUI from 'three-mesh-ui'
|
2
2
|
import { BaseUIComponent } from "./BaseUIComponent.js";
|
3
|
-
import { DocumentedOptions as ThreeMeshUIEveryOptions } from "three-mesh-ui/build/types/core/elements/MeshUIBaseElement.js";
|
3
|
+
import { type DocumentedOptions as ThreeMeshUIEveryOptions } from "three-mesh-ui/build/types/core/elements/MeshUIBaseElement.js";
|
4
4
|
import { serializable } from "../../engine/engine_serialization_decorator.js";
|
5
5
|
import { Matrix4, Object3D, Quaternion, Vector2, Vector3 } from "three";
|
6
6
|
import { getParam } from "../../engine/engine_utils.js";
|
7
7
|
import { onChange } from "./Utils.js";
|
8
8
|
import { foreachComponentEnumerator } from "../../engine/engine_gameobject.js";
|
9
|
-
import { ICanvas, IRectTransform, IRectTransformChangedReceiver } from "./Interfaces.js";
|
9
|
+
import { type ICanvas, type IRectTransform, type IRectTransformChangedReceiver } from "./Interfaces.js";
|
10
10
|
import { GameObject } from '../Component.js';
|
11
11
|
|
12
12
|
const debug = getParam("debugui");
|
@@ -1,9 +1,8 @@
|
|
1
1
|
import { Behaviour } from "./Component.js";
|
2
|
-
import {
|
2
|
+
import { EquirectangularReflectionMapping, Material, Object3D, SRGBColorSpace, Texture, Vector3 } from "three";
|
3
3
|
import { serializable } from "../engine/engine_serialization.js";
|
4
4
|
import { Context } from "../engine/engine_setup.js";
|
5
|
-
import {
|
6
|
-
import { IRenderer } from "../engine/engine_types.js";
|
5
|
+
import type { IRenderer } from "../engine/engine_types.js";
|
7
6
|
import { BoxHelperComponent } from "./BoxHelperComponent.js";
|
8
7
|
import { getParam } from "../engine/engine_utils.js";
|
9
8
|
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { TypeStore } from "./../engine_typestore.js"
|
2
|
-
|
2
|
+
|
3
3
|
// Import types
|
4
4
|
import { __Ignore } from "../../engine-components/codegen/components.js";
|
5
5
|
import { ActionBuilder } from "../../engine-components/export/usdz/extensions/behavior/BehavioursBuilder.js";
|
@@ -217,7 +217,7 @@
|
|
217
217
|
import { XRGrabRendering } from "../../engine-components/webxr/WebXRGrabRendering.js";
|
218
218
|
import { XRRig } from "../../engine-components/webxr/WebXRRig.js";
|
219
219
|
import { XRState } from "../../engine-components/XRFlag.js";
|
220
|
-
|
220
|
+
|
221
221
|
// Register types
|
222
222
|
TypeStore.add("__Ignore", __Ignore);
|
223
223
|
TypeStore.add("ActionBuilder", ActionBuilder);
|
@@ -9,7 +9,7 @@
|
|
9
9
|
import { NEEDLE_render_objects } from "../engine/extensions/NEEDLE_render_objects.js";
|
10
10
|
import { NEEDLE_progressive } from "../engine/extensions/NEEDLE_progressive.js";
|
11
11
|
import { NEED_UPDATE_INSTANCE_KEY } from "../engine/engine_instancing.js";
|
12
|
-
import { IRenderer, ISharedMaterials } from "../engine/engine_types.js";
|
12
|
+
import type { IRenderer, ISharedMaterials } from "../engine/engine_types.js";
|
13
13
|
import { ReflectionProbe } from "./ReflectionProbe.js";
|
14
14
|
import { setCustomVisibility } from "../engine/js-extensions/Layers.js";
|
15
15
|
import { isLocalNetwork } from "../engine/engine_networking_utils.js";
|
@@ -1,6 +1,5 @@
|
|
1
|
-
import {
|
2
|
-
import {
|
3
|
-
import { Context, OnRenderCallback } from "../engine/engine_setup.js";
|
1
|
+
import { Material, Mesh, type Shader, ShaderMaterial, Texture, Vector4 } from "three";
|
2
|
+
import type { Context, OnRenderCallback } from "../engine/engine_setup.js";
|
4
3
|
import { getParam } from "../engine/engine_utils.js";
|
5
4
|
|
6
5
|
const debug = getParam("debuglightmaps");
|
@@ -4,7 +4,7 @@
|
|
4
4
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
5
5
|
import { Watch } from "../engine/engine_utils.js";
|
6
6
|
import { Matrix4, Object3D, Vector3 } from "three";
|
7
|
-
import { IRigidbody } from "../engine/engine_types.js";
|
7
|
+
import type { IRigidbody, Vec3 } from "../engine/engine_types.js";
|
8
8
|
import { CollisionDetectionMode, RigidbodyConstraints } from "../engine/engine_physics.types.js";
|
9
9
|
import { validate } from "../engine/engine_util_decorator.js";
|
10
10
|
import { Context, FrameEvent } from "../engine/engine_setup.js";
|
@@ -307,12 +307,12 @@
|
|
307
307
|
this.resetVelocities();
|
308
308
|
}
|
309
309
|
|
310
|
-
public resetForces() {
|
311
|
-
this.context.physics.engine?.resetForces(this,
|
310
|
+
public resetForces(wakeup:boolean = true) {
|
311
|
+
this.context.physics.engine?.resetForces(this, wakeup);
|
312
312
|
}
|
313
313
|
|
314
|
-
public resetTorques() {
|
315
|
-
this.context.physics.engine?.resetTorques(this,
|
314
|
+
public resetTorques(wakeup:boolean = true) {
|
315
|
+
this.context.physics.engine?.resetTorques(this, wakeup);
|
316
316
|
}
|
317
317
|
|
318
318
|
public resetVelocities() {
|
@@ -329,17 +329,17 @@
|
|
329
329
|
this.context.physics.engine?.wakeup(this);
|
330
330
|
}
|
331
331
|
|
332
|
-
public applyForce(vec: Vector3, _rel?: THREE.Vector3) {
|
333
|
-
this.context.physics.engine?.addForce(this, vec,
|
332
|
+
public applyForce(vec: Vector3 | Vec3, _rel?: THREE.Vector3, wakeup: boolean = true) {
|
333
|
+
this.context.physics.engine?.addForce(this, vec, wakeup);
|
334
334
|
}
|
335
335
|
|
336
|
-
public applyImpulse(vec: Vector3) {
|
337
|
-
this.context.physics.engine?.applyImpulse(this, vec,
|
336
|
+
public applyImpulse(vec: Vector3 | Vec3, wakeup: boolean = true) {
|
337
|
+
this.context.physics.engine?.applyImpulse(this, vec, wakeup);
|
338
338
|
}
|
339
339
|
|
340
|
-
public setForce(x: number, y: number, z: number) {
|
341
|
-
this.context.physics.engine?.resetForces(this,
|
342
|
-
this.context.physics.engine?.addForce(this, { x, y, z },
|
340
|
+
public setForce(x: number, y: number, z: number, wakeup: boolean = true) {
|
341
|
+
this.context.physics.engine?.resetForces(this, wakeup);
|
342
|
+
this.context.physics.engine?.addForce(this, { x, y, z }, wakeup);
|
343
343
|
}
|
344
344
|
|
345
345
|
public getVelocity(): Vector3 {
|
@@ -351,24 +351,24 @@
|
|
351
351
|
return this._currentVelocity;
|
352
352
|
}
|
353
353
|
|
354
|
-
public setVelocity(x: number | Vector3, y?: number, z?: number) {
|
354
|
+
public setVelocity(x: number | Vector3, y?: number, z?: number, wakeup: boolean = true) {
|
355
355
|
if (x instanceof Vector3) {
|
356
356
|
const vec = x;
|
357
|
-
this.context.physics.engine?.setLinearVelocity(this,vec,
|
357
|
+
this.context.physics.engine?.setLinearVelocity(this, vec, wakeup);
|
358
358
|
return;
|
359
359
|
}
|
360
360
|
if (y === undefined || z === undefined) return;
|
361
|
-
this.context.physics.engine?.setLinearVelocity(this, { x: x, y: y, z: z },
|
361
|
+
this.context.physics.engine?.setLinearVelocity(this, { x: x, y: y, z: z }, wakeup);
|
362
362
|
}
|
363
363
|
|
364
|
-
public setAngularVelocity(x: number | Vector3, y?: number, z?: number) {
|
364
|
+
public setAngularVelocity(x: number | Vector3, y?: number, z?: number, wakeup: boolean = true) {
|
365
365
|
if (x instanceof Vector3) {
|
366
366
|
const vec = x;
|
367
|
-
this.context.physics.engine?.setAngularVelocity(this, vec,
|
367
|
+
this.context.physics.engine?.setAngularVelocity(this, vec, wakeup);
|
368
368
|
return;
|
369
369
|
}
|
370
370
|
if (y === undefined || z === undefined) return;
|
371
|
-
this.context.physics.engine?.setAngularVelocity(this, { x: x, y: y, z: z },
|
371
|
+
this.context.physics.engine?.setAngularVelocity(this, { x: x, y: y, z: z }, wakeup);
|
372
372
|
}
|
373
373
|
|
374
374
|
public getAngularVelocity(): Vector3 {
|
@@ -1,24 +1,22 @@
|
|
1
1
|
import { Behaviour, GameObject } from "./Component.js";
|
2
|
-
import { VideoPlayer } from "./VideoPlayer.js";
|
3
|
-
import Peer, { MediaConnection } from "peerjs"
|
4
|
-
import { Context } from "../engine/engine_setup.js";
|
5
|
-
import { RoomEvents } from "../engine/engine_networking.js";
|
6
|
-
import { UserJoinedOrLeftRoomModel } from "../engine/engine_networking.js";
|
2
|
+
import { AspectMode, VideoPlayer } from "./VideoPlayer.js";
|
7
3
|
import { serializable } from "../engine/engine_serialization.js";
|
8
|
-
import { IPointerClickHandler, PointerEventData } from "./ui/PointerEvents.js";
|
9
|
-
import { EventDispatcher } from "three";
|
4
|
+
import type { IPointerClickHandler, PointerEventData } from "./ui/PointerEvents.js";
|
10
5
|
import { AudioSource } from "./AudioSource.js";
|
11
6
|
import { getParam } from "../engine/engine_utils.js";
|
12
|
-
import { IModel } from "../engine/engine_networking_types.js";
|
13
7
|
import { showBalloonWarning } from "../engine/debug/index.js";
|
14
|
-
import {
|
8
|
+
import { NetworkedStreams, disposeStream, StreamReceivedEvent, StreamEndedEvent, PeerHandle, NetworkedStreamEvents } from "../engine/engine_networking_streams.js";
|
9
|
+
import { RoomEvents } from "../engine/engine_networking.js";
|
15
10
|
|
16
11
|
const debug = getParam("debugscreensharing");
|
17
12
|
|
18
13
|
export enum ScreenCaptureDevice {
|
19
14
|
Screen = 0,
|
20
15
|
Camera = 1,
|
21
|
-
|
16
|
+
/** Please note that canvas streaming might not work reliably on chrome: https://bugs.chromium.org/p/chromium/issues/detail?id=1156408 */
|
17
|
+
Canvas = 2,
|
18
|
+
/** When using Microphone only the voice will be sent */
|
19
|
+
Microphone = 3
|
22
20
|
}
|
23
21
|
|
24
22
|
export enum ScreenCaptureMode {
|
@@ -27,11 +25,6 @@
|
|
27
25
|
Receiving = 2
|
28
26
|
}
|
29
27
|
|
30
|
-
function disposeStream(str: MediaStream | null | undefined) {
|
31
|
-
if (!str) return;
|
32
|
-
for (const cap of str.getTracks())
|
33
|
-
cap.stop();
|
34
|
-
}
|
35
28
|
|
36
29
|
declare type ScreenCaptureOptions = {
|
37
30
|
device?: ScreenCaptureDevice,
|
@@ -44,6 +37,7 @@
|
|
44
37
|
|
45
38
|
export class ScreenCapture extends Behaviour implements IPointerClickHandler {
|
46
39
|
|
40
|
+
@serializable()
|
47
41
|
allowStartOnClick: boolean = true;
|
48
42
|
|
49
43
|
onPointerEnter() {
|
@@ -73,16 +67,36 @@
|
|
73
67
|
}
|
74
68
|
|
75
69
|
|
70
|
+
/** When enabled the stream will start when this component becomes active (enabled in the scene) */
|
71
|
+
@serializable()
|
72
|
+
autoConnect: boolean = false;
|
73
|
+
|
74
|
+
|
76
75
|
@serializable(VideoPlayer)
|
77
|
-
videoPlayer
|
76
|
+
set videoPlayer(val: VideoPlayer | undefined) {
|
77
|
+
if (this._videoPlayer && (this.isSending || this.isReceiving)) {
|
78
|
+
this._videoPlayer.stop();
|
79
|
+
}
|
80
|
+
this._videoPlayer = val;
|
81
|
+
if (this._videoPlayer && this._currentStream && (this.isSending || this.isReceiving)) {
|
82
|
+
this._videoPlayer.setVideo(this._currentStream);
|
83
|
+
}
|
84
|
+
}
|
85
|
+
get videoPlayer() { return this._videoPlayer; }
|
86
|
+
private _videoPlayer?: VideoPlayer;
|
87
|
+
private _audioSource?: AudioSource;
|
78
88
|
|
89
|
+
get screenspace() { return this.videoPlayer?.screenspace ?? false; }
|
90
|
+
set screenspace(v: boolean) { if (this.videoPlayer) this.videoPlayer.screenspace = v; }
|
91
|
+
|
92
|
+
// TODO: make this a property
|
93
|
+
/** Note: this can not be changed while streaming */
|
79
94
|
@serializable()
|
80
95
|
device: ScreenCaptureDevice = ScreenCaptureDevice.Screen;
|
81
96
|
|
82
97
|
get currentScream(): MediaStream | null {
|
83
98
|
return this._currentStream;
|
84
99
|
}
|
85
|
-
|
86
100
|
get currentMode(): ScreenCaptureMode {
|
87
101
|
return this._currentMode;
|
88
102
|
}
|
@@ -102,7 +116,9 @@
|
|
102
116
|
return false;
|
103
117
|
}
|
104
118
|
|
105
|
-
private
|
119
|
+
private get requiresVideoPlayer() { return this.device !== ScreenCaptureDevice.Microphone }
|
120
|
+
|
121
|
+
private _net?: NetworkedStreams;
|
106
122
|
private _requestOpen: boolean = false;
|
107
123
|
private _currentStream: MediaStream | null = null;
|
108
124
|
private _currentMode: ScreenCaptureMode = ScreenCaptureMode.Idle;
|
@@ -116,26 +132,36 @@
|
|
116
132
|
this.videoPlayer.setVideo(this._currentStream);
|
117
133
|
}
|
118
134
|
});
|
135
|
+
const handle = PeerHandle.getOrCreate(this.context, this.guid);
|
136
|
+
this._net = new NetworkedStreams(this.context, handle);
|
119
137
|
}
|
120
138
|
|
139
|
+
onEnable(): void {
|
140
|
+
this._net?.enable();
|
141
|
+
//@ts-ignore
|
142
|
+
this._net?.addEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
|
143
|
+
//@ts-ignore
|
144
|
+
this._net?.addEventListener(NetworkedStreamEvents.StreamEnded, this.onCallEnded);
|
145
|
+
this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
146
|
+
if (this.context.connection.isInRoom && this.autoConnect) {
|
147
|
+
this.share();
|
148
|
+
}
|
149
|
+
}
|
121
150
|
|
122
151
|
onDisable(): void {
|
152
|
+
//@ts-ignore
|
153
|
+
this._net?.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
|
154
|
+
//@ts-ignore
|
155
|
+
this._net?.removeEventListener(NetworkedStreamEvents.StreamEnded, this.onCallEnded);
|
156
|
+
this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
157
|
+
this._net?.disable();
|
123
158
|
this.close();
|
124
159
|
}
|
125
160
|
|
126
|
-
|
127
|
-
if (!this.
|
128
|
-
this.
|
161
|
+
private onJoinedRoom = () => {
|
162
|
+
if (this.autoConnect && !this.isSending && !this.isReceiving) {
|
163
|
+
this.share();
|
129
164
|
}
|
130
|
-
if (!this.videoPlayer) {
|
131
|
-
console.warn("ScreenCapture: Requires an assigned VideoPlayer or a VideoPlayer component on the same object as this component.");
|
132
|
-
return;
|
133
|
-
}
|
134
|
-
const handle = PeerHandle.getOrCreate(this.context, this.guid);
|
135
|
-
this._net = new NetworkedVideo(this.context, handle);
|
136
|
-
this._net.enable();
|
137
|
-
//@ts-ignore
|
138
|
-
this._net.addEventListener(PeerEvent.ReceiveVideo, this.onReceiveVideo.bind(this));
|
139
165
|
}
|
140
166
|
|
141
167
|
/** Call to begin screensharing */
|
@@ -144,62 +170,94 @@
|
|
144
170
|
if (opts?.device)
|
145
171
|
this.device = opts.device;
|
146
172
|
|
173
|
+
if (!this.videoPlayer && this.requiresVideoPlayer) {
|
174
|
+
if (!this._videoPlayer) {
|
175
|
+
this._videoPlayer = GameObject.getComponent(this.gameObject, VideoPlayer) ?? undefined;
|
176
|
+
}
|
177
|
+
if (!this.videoPlayer) {
|
178
|
+
const vp = new VideoPlayer();
|
179
|
+
vp.aspectMode = AspectMode.AdjustWidth;
|
180
|
+
GameObject.addComponent(this.gameObject, vp);
|
181
|
+
this._videoPlayer = vp;
|
182
|
+
}
|
183
|
+
if (!this.videoPlayer) {
|
184
|
+
console.warn("Can not share video without a videoPlayer assigned");
|
185
|
+
return;
|
186
|
+
}
|
187
|
+
}
|
188
|
+
|
147
189
|
this._requestOpen = true;
|
148
190
|
try {
|
149
|
-
if (this.videoPlayer) {
|
150
191
|
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
|
158
|
-
|
159
|
-
|
160
|
-
|
161
|
-
|
162
|
-
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
|
168
|
-
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
|
192
|
+
const settings: MediaTrackConstraints = opts?.constraints ?? {
|
193
|
+
echoCancellation: true,
|
194
|
+
autoGainControl: false,
|
195
|
+
};
|
196
|
+
const displayMediaOptions: MediaStreamConstraints = {
|
197
|
+
video: settings,
|
198
|
+
audio: settings,
|
199
|
+
};
|
200
|
+
const videoOptions = displayMediaOptions.video;
|
201
|
+
if (videoOptions !== undefined && typeof videoOptions !== "boolean") {
|
202
|
+
// Set default video settings
|
203
|
+
if (!videoOptions.width)
|
204
|
+
videoOptions.width = { max: 1920 };
|
205
|
+
if (!videoOptions.height)
|
206
|
+
videoOptions.height = { max: 1920 };
|
207
|
+
if (!videoOptions.aspectRatio)
|
208
|
+
videoOptions.aspectRatio = { ideal: 1.7777777778 };
|
209
|
+
if (!videoOptions.frameRate)
|
210
|
+
videoOptions.frameRate = { ideal: 24 };
|
211
|
+
if (!videoOptions.facingMode)
|
212
|
+
videoOptions.facingMode = { ideal: "user" };
|
213
|
+
}
|
173
214
|
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
215
|
+
switch (this.device) {
|
216
|
+
// Capture a connected camera
|
217
|
+
case ScreenCaptureDevice.Camera:
|
218
|
+
this.tryShareUserCamera(displayMediaOptions, opts);
|
219
|
+
break;
|
179
220
|
|
180
|
-
|
181
|
-
|
221
|
+
// capture any screen, will show a popup
|
222
|
+
case ScreenCaptureDevice.Screen:
|
223
|
+
{
|
182
224
|
if (!navigator.mediaDevices.getDisplayMedia) {
|
183
225
|
console.error("No getDisplayMedia support");
|
184
226
|
return;
|
185
227
|
}
|
186
228
|
const myVideo = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
|
187
229
|
if (this._requestOpen) {
|
188
|
-
this.
|
230
|
+
this.setStream(myVideo, ScreenCaptureMode.Sending);
|
189
231
|
}
|
190
232
|
else disposeStream(myVideo);
|
191
|
-
|
233
|
+
}
|
234
|
+
break;
|
192
235
|
|
193
|
-
|
194
|
-
|
195
|
-
|
196
|
-
|
197
|
-
|
198
|
-
|
199
|
-
|
200
|
-
|
201
|
-
|
202
|
-
|
236
|
+
// capture the canvas meaning the threejs view
|
237
|
+
case ScreenCaptureDevice.Canvas:
|
238
|
+
// looks like this doesnt work reliably on chrome https://stackoverflow.com/a/66848674
|
239
|
+
// firefox updates fine
|
240
|
+
// https://bugs.chromium.org/p/chromium/issues/detail?id=1156408
|
241
|
+
const fps = 0;
|
242
|
+
const stream = this.context.renderer.domElement.captureStream(fps);
|
243
|
+
this.setStream(stream, ScreenCaptureMode.Sending);
|
244
|
+
break;
|
245
|
+
|
246
|
+
case ScreenCaptureDevice.Microphone:
|
247
|
+
{
|
248
|
+
if (!navigator.mediaDevices.getUserMedia) {
|
249
|
+
console.error("No getDisplayMedia support");
|
250
|
+
return;
|
251
|
+
}
|
252
|
+
displayMediaOptions.video = false;
|
253
|
+
const myStream = await navigator.mediaDevices.getUserMedia(displayMediaOptions);
|
254
|
+
if (this._requestOpen) {
|
255
|
+
this.setStream(myStream, ScreenCaptureMode.Sending);
|
256
|
+
}
|
257
|
+
else disposeStream(myStream);
|
258
|
+
}
|
259
|
+
break
|
260
|
+
|
203
261
|
}
|
204
262
|
} catch (err: any) {
|
205
263
|
if (err.name === "NotAllowedError") {
|
@@ -217,30 +275,56 @@
|
|
217
275
|
if (this._currentStream) {
|
218
276
|
if (debug)
|
219
277
|
console.warn("Close current stream / disposing resources, stream was active?", this._currentStream.active);
|
220
|
-
this._net?.
|
278
|
+
this._net?.stopSendingStream(this._currentStream);
|
221
279
|
disposeStream(this._currentStream);
|
222
280
|
this._currentMode = ScreenCaptureMode.Idle;
|
223
281
|
this._currentStream = null;
|
224
282
|
}
|
225
283
|
}
|
226
284
|
|
227
|
-
private
|
285
|
+
private setStream(stream: MediaStream, mode: ScreenCaptureMode) {
|
286
|
+
|
228
287
|
if (stream === this._currentStream) return;
|
288
|
+
|
229
289
|
this.close();
|
230
|
-
if (!stream
|
290
|
+
if (!stream) return;
|
291
|
+
|
231
292
|
this._currentStream = stream;
|
232
293
|
this._requestOpen = true;
|
233
294
|
this._currentMode = mode;
|
234
|
-
this.videoPlayer.setVideo(stream);
|
235
295
|
|
296
|
+
const isVideoStream = this.device !== ScreenCaptureDevice.Microphone;
|
236
297
|
const isSending = mode === ScreenCaptureMode.Sending;
|
298
|
+
|
299
|
+
if (isVideoStream) {
|
300
|
+
if (this._videoPlayer)
|
301
|
+
this._videoPlayer.setVideo(stream);
|
302
|
+
else console.error("No video player assigned for video stream");
|
303
|
+
}
|
304
|
+
else {
|
305
|
+
if (!this._audioSource) {
|
306
|
+
this._audioSource = new AudioSource();
|
307
|
+
this._audioSource.spatialBlend = 0;
|
308
|
+
this._audioSource.volume = 1;
|
309
|
+
this.gameObject.addComponent(this._audioSource);
|
310
|
+
}
|
311
|
+
if (!isSending) {
|
312
|
+
console.log("PLAY", stream.getAudioTracks())
|
313
|
+
this._audioSource.volume = 1;
|
314
|
+
this._audioSource?.play(stream);
|
315
|
+
}
|
316
|
+
}
|
317
|
+
|
237
318
|
if (isSending) {
|
238
|
-
this._net?.
|
319
|
+
this._net?.startSendingStream(stream);
|
239
320
|
}
|
240
321
|
|
241
322
|
// Mute audio for the video we are sending
|
242
|
-
if (
|
243
|
-
this.
|
323
|
+
if (isSending) {
|
324
|
+
if (this._videoPlayer)
|
325
|
+
this._videoPlayer.muted = true;
|
326
|
+
this._audioSource?.stop();
|
327
|
+
}
|
244
328
|
|
245
329
|
for (const track of stream.getTracks()) {
|
246
330
|
track.addEventListener("ended", () => {
|
@@ -260,10 +344,14 @@
|
|
260
344
|
|
261
345
|
}
|
262
346
|
|
263
|
-
private
|
347
|
+
private onReceiveStream = (evt: StreamReceivedEvent) => {
|
264
348
|
if (evt.stream?.active !== true) return;
|
265
|
-
this.
|
349
|
+
this.setStream(evt.stream, ScreenCaptureMode.Receiving);
|
266
350
|
}
|
351
|
+
private onCallEnded = (_evt: StreamEndedEvent) => {
|
352
|
+
if (debug) console.log("CALL ENDED", this.isReceiving, this?.screenspace)
|
353
|
+
if (this.isReceiving) this.screenspace = false;
|
354
|
+
}
|
267
355
|
|
268
356
|
|
269
357
|
|
@@ -298,7 +386,7 @@
|
|
298
386
|
}
|
299
387
|
const userMedia = await navigator.mediaDevices.getUserMedia(opts);
|
300
388
|
if (this._requestOpen) {
|
301
|
-
this.
|
389
|
+
this.setStream(userMedia, ScreenCaptureMode.Sending);
|
302
390
|
}
|
303
391
|
else disposeStream(userMedia);
|
304
392
|
if (debug)
|
@@ -324,431 +412,3 @@
|
|
324
412
|
// }
|
325
413
|
}
|
326
414
|
|
327
|
-
|
328
|
-
/////// PEER
|
329
|
-
|
330
|
-
enum PeerEvent {
|
331
|
-
Connected = "peer-user-connected",
|
332
|
-
ReceiveVideo = "receive-video",
|
333
|
-
Disconnected = "peer-user-disconnected",
|
334
|
-
UserJoined = "user-joined",
|
335
|
-
}
|
336
|
-
|
337
|
-
class ReceiveVideoEvent {
|
338
|
-
readonly type = PeerEvent.ReceiveVideo;
|
339
|
-
readonly stream: MediaStream;
|
340
|
-
readonly target: CallHandle;
|
341
|
-
constructor(stream: MediaStream, target: CallHandle) {
|
342
|
-
this.stream = stream
|
343
|
-
this.target = target;
|
344
|
-
}
|
345
|
-
}
|
346
|
-
|
347
|
-
class PeerUserConnectedModel implements IModel {
|
348
|
-
/** the peer handle id */
|
349
|
-
readonly guid: string;
|
350
|
-
readonly peerId: string;
|
351
|
-
// internal so server doesnt save it to persistent storage
|
352
|
-
readonly dontSave: boolean = true;
|
353
|
-
constructor(handle: PeerHandle, peerId: string) {
|
354
|
-
this.guid = handle.id;
|
355
|
-
this.peerId = peerId;
|
356
|
-
}
|
357
|
-
}
|
358
|
-
|
359
|
-
enum CallDirection {
|
360
|
-
Incoming = "incoming",
|
361
|
-
Outgoing = "outgoing",
|
362
|
-
}
|
363
|
-
|
364
|
-
class CallHandle extends EventDispatcher {
|
365
|
-
readonly userId: string;
|
366
|
-
readonly direction: CallDirection;
|
367
|
-
readonly call: MediaConnection;
|
368
|
-
get stream() { return this._stream; };
|
369
|
-
|
370
|
-
private _stream: MediaStream | null = null;
|
371
|
-
private _isDisposed: boolean = false;
|
372
|
-
|
373
|
-
close() {
|
374
|
-
if (this._isDisposed) return;
|
375
|
-
this._isDisposed = true;
|
376
|
-
this.call.close();
|
377
|
-
disposeStream(this._stream);
|
378
|
-
}
|
379
|
-
|
380
|
-
get isOpen() {
|
381
|
-
return this.call.peerConnection?.connectionState === "connected";// && this._stream?.active;
|
382
|
-
}
|
383
|
-
|
384
|
-
get isClosed() {
|
385
|
-
return !this.isOpen;
|
386
|
-
}
|
387
|
-
|
388
|
-
constructor(userId: string, call: MediaConnection, direction: CallDirection) {
|
389
|
-
super();
|
390
|
-
this.userId = userId;
|
391
|
-
this.call = call;
|
392
|
-
this.direction = direction;
|
393
|
-
this._stream = null;
|
394
|
-
call.on("stream", stream => {
|
395
|
-
if (debug)
|
396
|
-
console.log("Receive video", stream.getAudioTracks(), stream.getVideoTracks());
|
397
|
-
this._stream = stream;
|
398
|
-
if (direction === CallDirection.Incoming) {
|
399
|
-
const args: ReceiveVideoEvent = new ReceiveVideoEvent(stream, this);
|
400
|
-
this.dispatchEvent(args);
|
401
|
-
}
|
402
|
-
});
|
403
|
-
}
|
404
|
-
}
|
405
|
-
|
406
|
-
class PeerHandle extends EventDispatcher {
|
407
|
-
|
408
|
-
private static readonly instances: Map<string, PeerHandle> = new Map();
|
409
|
-
|
410
|
-
static getOrCreate(context: Context, guid: string): PeerHandle {
|
411
|
-
// if (id === undefined) {
|
412
|
-
// // randomId
|
413
|
-
// id = Math.random().toFixed(5);
|
414
|
-
// }
|
415
|
-
if (PeerHandle.instances.has(guid))
|
416
|
-
return PeerHandle.instances.get(guid)!;
|
417
|
-
const peer = new PeerHandle(context, guid);
|
418
|
-
PeerHandle.instances.set(guid, peer);
|
419
|
-
return peer;
|
420
|
-
}
|
421
|
-
|
422
|
-
getMyPeerId(): string | undefined {
|
423
|
-
if (this.context.connection.connectionId)
|
424
|
-
return this.getPeerIdFromUserId(this.context.connection.connectionId);
|
425
|
-
return undefined;
|
426
|
-
}
|
427
|
-
|
428
|
-
getPeerIdFromUserId(userConnectionId: string): string {
|
429
|
-
// we build the peer id ourselves so we dont need to wait for peer to report it
|
430
|
-
return this.id + "-" + userConnectionId;
|
431
|
-
}
|
432
|
-
|
433
|
-
getUserIdFromPeerId(peerId: string): string {
|
434
|
-
return peerId.substring(this.id.length + 1);
|
435
|
-
}
|
436
|
-
|
437
|
-
makeCall(peerId: string, stream: MediaStream): CallHandle | undefined {
|
438
|
-
const opts = { metadata: { userId: this.context.connection.connectionId } };
|
439
|
-
const call = this._peer?.call(peerId, stream, opts);
|
440
|
-
if (call)
|
441
|
-
return this.registerCall(call, CallDirection.Outgoing);
|
442
|
-
return undefined;
|
443
|
-
}
|
444
|
-
|
445
|
-
get peer(): Peer | undefined { return this._peer; }
|
446
|
-
|
447
|
-
readonly id: string;
|
448
|
-
readonly context: Context;
|
449
|
-
private _peer: Peer | undefined;
|
450
|
-
private _incomingCalls: CallHandle[] = [];
|
451
|
-
private _outgoingCalls: CallHandle[] = [];
|
452
|
-
|
453
|
-
private constructor(context: Context, id: string) {
|
454
|
-
super();
|
455
|
-
this.context = context;
|
456
|
-
this.id = id;
|
457
|
-
this.setupPeer();
|
458
|
-
navigator["getUserMedia"] = (
|
459
|
-
navigator["getUserMedia"] || navigator["webkitGetUserMedia"] ||
|
460
|
-
navigator["mozGetUserMedia"] || navigator["msGetUserMedia"]
|
461
|
-
);
|
462
|
-
}
|
463
|
-
|
464
|
-
private _enabled: boolean = false;
|
465
|
-
private _enabledPeer: boolean = false;
|
466
|
-
private onConnectRoomFn: Function = this.onConnectRoom.bind(this);
|
467
|
-
// private onUserJoinedOrLeftRoomFn: Function = this.onUserJoinedOrLeftRoom.bind(this);
|
468
|
-
private onPeerConnectFn: (id) => void = this.onPeerConnect.bind(this);
|
469
|
-
private onPeerReceiveCallFn: (call) => void = this.onPeerReceivingCall.bind(this);
|
470
|
-
// private _connectionPeerIdMap : Map<string, string> = new Map();
|
471
|
-
|
472
|
-
enable() {
|
473
|
-
if (this._enabled) return;
|
474
|
-
this._enabled = true;
|
475
|
-
this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onConnectRoomFn);
|
476
|
-
// this.context.connection.beginListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
|
477
|
-
// this.context.connection.beginListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
|
478
|
-
this.subscribePeerEvents();
|
479
|
-
}
|
480
|
-
|
481
|
-
disable() {
|
482
|
-
if (!this._enabled) return;
|
483
|
-
this._enabled = false;
|
484
|
-
this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onConnectRoomFn);
|
485
|
-
// this.context.connection.stopListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
|
486
|
-
// this.context.connection.stopListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
|
487
|
-
this.unsubscribePeerEvents();
|
488
|
-
}
|
489
|
-
|
490
|
-
private onConnectRoom(): void {
|
491
|
-
this.setupPeer();
|
492
|
-
};
|
493
|
-
|
494
|
-
// private onUserJoinedOrLeftRoom(_: UserJoinedOrLeftRoomModel): void {
|
495
|
-
// };
|
496
|
-
|
497
|
-
private setupPeer() {
|
498
|
-
if (!this.context.connection.connectionId) return;
|
499
|
-
if (this._enabledPeer) return;
|
500
|
-
this._enabledPeer = true;
|
501
|
-
if (!this._peer) {
|
502
|
-
const peerId = this.getMyPeerId();
|
503
|
-
if (peerId)
|
504
|
-
this._peer = getPeerjsInstance(peerId);
|
505
|
-
else console.error("Failed to setup peerjs because we dont have a connection id", this.context.connection.connectionId);
|
506
|
-
}
|
507
|
-
if (this._enabled)
|
508
|
-
this.subscribePeerEvents();
|
509
|
-
}
|
510
|
-
|
511
|
-
private subscribePeerEvents() {
|
512
|
-
if (!this._peer) return;
|
513
|
-
this._peer.on("open", this.onPeerConnectFn);
|
514
|
-
this._peer.on("call", this.onPeerReceiveCallFn);
|
515
|
-
// this.context.connection.beginListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
|
516
|
-
// TODO: make connection to all current active calls even if the user is not anymore in the needle room
|
517
|
-
}
|
518
|
-
|
519
|
-
private unsubscribePeerEvents() {
|
520
|
-
if (!this._peer) return;
|
521
|
-
this._peer.off("open", this.onPeerConnectFn);
|
522
|
-
this._peer.off("call", this.onPeerReceiveCallFn);
|
523
|
-
// this.context.connection.stopListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
|
524
|
-
}
|
525
|
-
|
526
|
-
private onPeerConnect(id): void {
|
527
|
-
if (debug)
|
528
|
-
console.log("Peer connected as", id);
|
529
|
-
this.context.connection.send(PeerEvent.Connected, new PeerUserConnectedModel(this, id));
|
530
|
-
}
|
531
|
-
|
532
|
-
private onPeerReceivingCall(call: MediaConnection): void {
|
533
|
-
call.answer();
|
534
|
-
this.registerCall(call, CallDirection.Incoming);
|
535
|
-
}
|
536
|
-
|
537
|
-
private registerCall(call: MediaConnection, direction: CallDirection): CallHandle {
|
538
|
-
|
539
|
-
const meta = call.metadata;
|
540
|
-
if (!meta || !meta.userId) {
|
541
|
-
console.error("Missing call metadata", call);
|
542
|
-
}
|
543
|
-
const userId = meta.userId;
|
544
|
-
|
545
|
-
if (direction === CallDirection.Incoming && debug) console.log("Receive call from", call.metadata);
|
546
|
-
else if (debug) console.log("Make call to", call.metadata);
|
547
|
-
|
548
|
-
const arr = direction === CallDirection.Incoming ? this._incomingCalls : this._outgoingCalls;
|
549
|
-
const handle = new CallHandle(userId, call, direction);
|
550
|
-
arr.push(handle);
|
551
|
-
call.on("error", err => {
|
552
|
-
console.error("Call error", err);
|
553
|
-
});
|
554
|
-
call.on("close", () => {
|
555
|
-
if (debug)
|
556
|
-
console.log("Call ended", call.metadata);
|
557
|
-
call.close();
|
558
|
-
const index = arr.indexOf(handle);
|
559
|
-
if (index !== -1)
|
560
|
-
arr.splice(index, 1);
|
561
|
-
});
|
562
|
-
|
563
|
-
if (direction === CallDirection.Incoming) {
|
564
|
-
|
565
|
-
handle.addEventListener(PeerEvent.ReceiveVideo, e => {
|
566
|
-
this.dispatchEvent(e);
|
567
|
-
});
|
568
|
-
|
569
|
-
call.on("stream", () => {
|
570
|
-
// workaround for https://github.com/peers/peerjs/issues/636
|
571
|
-
let intervalCounter = 0;
|
572
|
-
const closeInterval = setInterval(() => {
|
573
|
-
const isFirstInterval = intervalCounter === 0;
|
574
|
-
if (!handle.isOpen && isFirstInterval) {
|
575
|
-
intervalCounter += 1;
|
576
|
-
clearInterval(closeInterval);
|
577
|
-
handle.close();
|
578
|
-
}
|
579
|
-
}, 2000);
|
580
|
-
});
|
581
|
-
}
|
582
|
-
return handle;
|
583
|
-
}
|
584
|
-
|
585
|
-
// private onRemotePeerConnect(user: PeerUserConnectedModel) {
|
586
|
-
// console.log("other user connected", user);
|
587
|
-
// }
|
588
|
-
}
|
589
|
-
|
590
|
-
|
591
|
-
// type UserVideoCall = {
|
592
|
-
// call: Peer.MediaConnection;
|
593
|
-
// stream: MediaStream;
|
594
|
-
// userId: string;
|
595
|
-
// }
|
596
|
-
|
597
|
-
// type IncomingStreamArgs = {
|
598
|
-
// stream: MediaStream;
|
599
|
-
// userId: string;
|
600
|
-
// }
|
601
|
-
|
602
|
-
class NetworkedVideo extends EventDispatcher {
|
603
|
-
|
604
|
-
private readonly context: Context;
|
605
|
-
private readonly peer: PeerHandle;
|
606
|
-
|
607
|
-
// private _receiveVideoStreamListeners: Array<(info: IncomingStreamArgs) => void> = [];
|
608
|
-
private _sendingVideoStreams: Map<MediaStream, CallHandle[]> = new Map();
|
609
|
-
|
610
|
-
constructor(context: Context, peer: PeerHandle) {
|
611
|
-
super();
|
612
|
-
this.context = context;
|
613
|
-
this.peer = peer;
|
614
|
-
}
|
615
|
-
|
616
|
-
startSendingVideo(stream: MediaStream) {
|
617
|
-
if (!this._sendingVideoStreams.has(stream)) {
|
618
|
-
this._sendingVideoStreams.set(stream, []);
|
619
|
-
this.updateSendingCalls();
|
620
|
-
};
|
621
|
-
}
|
622
|
-
|
623
|
-
stopSendingVideo(_steam: MediaStream | undefined | null) {
|
624
|
-
if (_steam) {
|
625
|
-
const calls = this._sendingVideoStreams.get(_steam);
|
626
|
-
if (calls) {
|
627
|
-
if (debug)
|
628
|
-
console.log("Closing calls", calls);
|
629
|
-
for (const call of calls) {
|
630
|
-
call.close();
|
631
|
-
}
|
632
|
-
}
|
633
|
-
this._sendingVideoStreams.delete(_steam);
|
634
|
-
if (calls && debug)
|
635
|
-
console.log("Currently sending", this._sendingVideoStreams);
|
636
|
-
}
|
637
|
-
}
|
638
|
-
|
639
|
-
// private onConnectRoomFn: Function = this.onConnectRoom.bind(this);
|
640
|
-
// private onUserConnectedFn: Function = this.onUserConnected.bind(this);
|
641
|
-
// private onUserLeftFn: Function = this.onUserLeft.bind(this);
|
642
|
-
|
643
|
-
enable() {
|
644
|
-
this.peer.enable();
|
645
|
-
this.peer.addEventListener(PeerEvent.ReceiveVideo, this.onReceiveVideo);
|
646
|
-
// this.peer.addEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
|
647
|
-
this.context.connection.beginListen(PeerEvent.Connected, this.onUserConnected);
|
648
|
-
this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
649
|
-
this.context.connection.beginListen(RoomEvents.UserJoinedRoom, this.onJoinedRoom);
|
650
|
-
this.context.connection.beginListen(RoomEvents.UserLeftRoom, this.onUserLeft);
|
651
|
-
}
|
652
|
-
|
653
|
-
disable() {
|
654
|
-
this.peer.disable();
|
655
|
-
this.peer.removeEventListener(PeerEvent.ReceiveVideo, this.onReceiveVideo);
|
656
|
-
// this.peer.removeEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
|
657
|
-
this.context.connection.stopListen(PeerEvent.Connected, this.onUserConnected);
|
658
|
-
this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
659
|
-
this.context.connection.stopListen(RoomEvents.UserJoinedRoom, this.onJoinedRoom);
|
660
|
-
this.context.connection.stopListen(RoomEvents.UserLeftRoom, this.onUserLeft);
|
661
|
-
}
|
662
|
-
|
663
|
-
// private onUserJoinedPeer = (evt) => {
|
664
|
-
// if (!this.context.connection.isConnected && evt.userId) {
|
665
|
-
// this.startCallWithUserIfNotAlready(evt.userId);
|
666
|
-
// }
|
667
|
-
// }
|
668
|
-
|
669
|
-
// When either we ourselves OR someone else is joining the room we want to make sure to re-establish all calls
|
670
|
-
// and if the user that joined is not yet receiving our video stream we want to start a stream with them
|
671
|
-
// https://github.com/needle-tools/needle-tiny/issues/697#issuecomment-1510425539
|
672
|
-
private onJoinedRoom = (evt) => {
|
673
|
-
if (debug) console.log(`${evt.userId} joined room and I'm currently sending ${this._sendingVideoStreams.size} streams`);
|
674
|
-
if (this._sendingVideoStreams.size > 0)
|
675
|
-
this.updateSendingCalls();
|
676
|
-
}
|
677
|
-
|
678
|
-
private onReceiveVideo = (evt) => {
|
679
|
-
if (debug)
|
680
|
-
console.log("RECEIVE VIDEO", evt);
|
681
|
-
this.dispatchEvent({ type: PeerEvent.ReceiveVideo, target: this, stream: evt.stream, userId: evt.userId });
|
682
|
-
}
|
683
|
-
|
684
|
-
private onUserConnected = (user: PeerUserConnectedModel) => {
|
685
|
-
// console.log(this.peer.id, user.guid)
|
686
|
-
if (this.peer.id === user.guid) {
|
687
|
-
if (debug)
|
688
|
-
console.log("USER CONNECTED", user.guid, user);
|
689
|
-
const stream = this._sendingVideoStreams.keys().next().value;
|
690
|
-
this.peer.makeCall(user.peerId, stream);
|
691
|
-
}
|
692
|
-
}
|
693
|
-
|
694
|
-
private onUserLeft(_: UserJoinedOrLeftRoomModel) {
|
695
|
-
this.stopCallsToUsersThatAreNotInTheRoomAnymore();
|
696
|
-
}
|
697
|
-
|
698
|
-
private updateSendingCalls() {
|
699
|
-
let startedNewCall = false;
|
700
|
-
for (const stream of this._sendingVideoStreams.keys()) {
|
701
|
-
const calls = this._sendingVideoStreams.get(stream) || [];
|
702
|
-
for (const userId of this.context.connection.usersInRoom()) {
|
703
|
-
if (userId === this.context.connection.connectionId) continue;
|
704
|
-
const existing = calls.find(c => c.userId === userId);
|
705
|
-
if (!existing || existing.stream?.active === false) {
|
706
|
-
if (debug) console.log("Starting call to", userId)
|
707
|
-
const handle = this.peer.makeCall(this.peer.getPeerIdFromUserId(userId), stream);
|
708
|
-
if (handle) {
|
709
|
-
startedNewCall = true;
|
710
|
-
calls.push(handle);
|
711
|
-
}
|
712
|
-
}
|
713
|
-
}
|
714
|
-
|
715
|
-
this._sendingVideoStreams.set(stream, calls);
|
716
|
-
}
|
717
|
-
this.stopCallsToUsersThatAreNotInTheRoomAnymore();
|
718
|
-
if (startedNewCall && debug) {
|
719
|
-
console.log("Currently sending", this._sendingVideoStreams);
|
720
|
-
}
|
721
|
-
}
|
722
|
-
|
723
|
-
// private startCallWithUserIfNotAlready(userId: string) {
|
724
|
-
// for (const stream of this._sendingVideoStreams.keys()) {
|
725
|
-
// const calls = this._sendingVideoStreams.get(stream) || [];
|
726
|
-
// const existing = calls.find(c => c.userId === userId);
|
727
|
-
// if (!existing || existing.stream?.active === false) {
|
728
|
-
// if (debug) console.log("Starting call to", userId)
|
729
|
-
// const handle = this.peer.makeCall(this.peer.getPeerIdFromUserId(userId), stream);
|
730
|
-
// if (handle) {
|
731
|
-
// calls.push(handle);
|
732
|
-
// return true;
|
733
|
-
// }
|
734
|
-
// }
|
735
|
-
// }
|
736
|
-
// return false;
|
737
|
-
// }
|
738
|
-
|
739
|
-
private stopCallsToUsersThatAreNotInTheRoomAnymore() {
|
740
|
-
for (const stream of this._sendingVideoStreams.keys()) {
|
741
|
-
const calls = this._sendingVideoStreams.get(stream);
|
742
|
-
if (!calls) continue;
|
743
|
-
for (let i = calls.length - 1; i >= 0; i--) {
|
744
|
-
const call = calls[i];
|
745
|
-
if (!this.context.connection.userIsInRoom(call.userId)) {
|
746
|
-
call.close();
|
747
|
-
calls.splice(i, 1);
|
748
|
-
}
|
749
|
-
}
|
750
|
-
}
|
751
|
-
}
|
752
|
-
|
753
|
-
// const call = peer.call(peerId, stream);
|
754
|
-
}
|
@@ -1,7 +1,7 @@
|
|
1
|
-
import { BlendFunction, DepthDownsamplingPass,
|
2
|
-
import { Color,
|
1
|
+
import { BlendFunction, DepthDownsamplingPass, NormalPass, SSAOEffect } from "postprocessing";
|
2
|
+
import { Color, PerspectiveCamera } from "three";
|
3
3
|
import { serializable } from "../../../engine/engine_serialization.js";
|
4
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
4
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
5
5
|
import { VolumeParameter } from "../VolumeParameter.js";
|
6
6
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
7
7
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { Color, NeverDepth, PerspectiveCamera } from "three";
|
2
2
|
import { serializable } from "../../../engine/engine_serialization.js";
|
3
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
3
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
4
4
|
import { VolumeParameter } from "../VolumeParameter.js";
|
5
5
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
6
6
|
import { N8AOPostPass } from "n8ao";
|
@@ -13,8 +13,8 @@
|
|
13
13
|
import { PlayerView, ViewDevice } from "../engine/engine_playerview.js";
|
14
14
|
import { RaycastOptions } from "../engine/engine_physics.js";
|
15
15
|
import { RoomEvents } from "../engine/engine_networking.js";
|
16
|
-
import { ICamera } from "../engine/engine_types.js";
|
17
|
-
import { IModel } from "../engine/engine_networking_types.js";
|
16
|
+
import type { ICamera } from "../engine/engine_types.js";
|
17
|
+
import type { IModel } from "../engine/engine_networking_types.js";
|
18
18
|
import { serializable } from "../engine/engine_serialization.js";
|
19
19
|
|
20
20
|
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { Behaviour } from "./Component.js";
|
2
2
|
import * as THREE from "three";
|
3
3
|
import { serializable, serializeable } from "../engine/engine_serialization_decorator.js";
|
4
|
-
import {
|
4
|
+
import { Material, NearestFilter, Texture } from "three";
|
5
5
|
import { RGBAColor } from "./js-extensions/RGBAColor.js";
|
6
6
|
import { getParam } from "../engine/engine_utils.js";
|
7
7
|
|
@@ -14,6 +14,7 @@
|
|
14
14
|
import { AssetReference } from "../engine/engine_addressables.js";
|
15
15
|
import { ViewDevice } from "../engine/engine_playerview.js";
|
16
16
|
import { InstantiateOptions } from "../engine/engine_gameobject.js";
|
17
|
+
import { isDevEnvironment } from "../engine/debug/index.js";
|
17
18
|
|
18
19
|
const SyncedCameraModelIdentifier = "SCAM";
|
19
20
|
registerBinaryType(SyncedCameraModelIdentifier, SyncedCameraModel.getRootAsSyncedCameraModel);
|
@@ -115,7 +116,7 @@
|
|
115
116
|
const cam = this.remoteCams[guid];
|
116
117
|
const timeDiff = this.context.time.realtimeSinceStartup - cam.lastUpdate;
|
117
118
|
if (!cam || (timeDiff) > this._camTimeoutInSeconds) {
|
118
|
-
console.log("Remote cam timeout",
|
119
|
+
if (isDevEnvironment()) console.log("Remote cam timeout", guid);
|
119
120
|
if (cam?.obj) {
|
120
121
|
GameObject.destroy(cam.obj);
|
121
122
|
}
|
@@ -5,7 +5,7 @@
|
|
5
5
|
import { SyncedTransformModel } from "../engine-schemes/synced-transform-model.js";
|
6
6
|
import { Rigidbody } from "./RigidBody.js";
|
7
7
|
import { Vector3 } from "three";
|
8
|
-
import { IModel } from "../engine/engine_networking_types.js";
|
8
|
+
import type { IModel } from "../engine/engine_networking_types.js";
|
9
9
|
|
10
10
|
export class TestRunner extends Behaviour {
|
11
11
|
awake(): void {
|
@@ -1,12 +1,12 @@
|
|
1
1
|
import { Graphic } from './Graphic.js';
|
2
2
|
import * as ThreeMeshUI from 'three-mesh-ui'
|
3
|
-
import { DocumentedOptions as ThreeMeshUIEveryOptions } from "three-mesh-ui/build/types/core/elements/MeshUIBaseElement.js";
|
3
|
+
import type { DocumentedOptions as ThreeMeshUIEveryOptions } from "three-mesh-ui/build/types/core/elements/MeshUIBaseElement.js";
|
4
4
|
import { Color } from 'three';
|
5
5
|
import { updateRenderSettings } from './Utils.js';
|
6
6
|
import { Canvas } from './Canvas.js';
|
7
7
|
import { serializable } from '../../engine/engine_serialization_decorator.js';
|
8
|
-
import { getParam
|
9
|
-
import { ICanvas, ICanvasEventReceiver, IHasAlphaFactor } from './Interfaces.js';
|
8
|
+
import { getParam } from '../../engine/engine_utils.js';
|
9
|
+
import { type ICanvas, type ICanvasEventReceiver, type IHasAlphaFactor } from './Interfaces.js';
|
10
10
|
|
11
11
|
const debug = getParam("debugtext");
|
12
12
|
|
@@ -1,5 +1,5 @@
|
|
1
1
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
2
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
2
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
3
3
|
import { KernelSize, TiltShiftEffect as TiltShift } from "postprocessing";
|
4
4
|
import { VolumeParameter } from "../VolumeParameter.js";
|
5
5
|
import { serializable } from "../../../engine/engine_serialization.js";
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { ACESFilmicToneMapping, LinearToneMapping, NoToneMapping, ReinhardToneMapping } from "three";
|
2
2
|
import { serializable } from "../../../engine/engine_serialization.js";
|
3
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
3
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
4
4
|
import { VolumeParameter } from "../VolumeParameter.js";
|
5
5
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
6
6
|
|
@@ -2,7 +2,7 @@
|
|
2
2
|
import { SyncedTransform } from "./SyncedTransform.js";
|
3
3
|
import { serializable } from "../engine/engine_serialization_decorator.js";
|
4
4
|
import * as params from "../engine/engine_default_parameters.js";
|
5
|
-
import { Mesh, MathUtils
|
5
|
+
import { Mesh, MathUtils } from "three";
|
6
6
|
import { TransformControls } from "three/examples/jsm/controls/TransformControls.js";
|
7
7
|
import { OrbitControls } from "./OrbitControls.js";
|
8
8
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
|
2
|
-
import { GLTF, GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
-
import {
|
2
|
+
import { type GLTF, type GLTFLoaderPlugin, GLTFParser } from "three/examples/jsm/loaders/GLTFLoader.js";
|
3
|
+
import { Mesh, Object3D } from "three";
|
4
4
|
import { getParam } from "../engine_utils.js";
|
5
5
|
|
6
6
|
|
@@ -5,7 +5,7 @@
|
|
5
5
|
import { ensureQuicklookLinkIsCreated } from "./utils/quicklook.js";
|
6
6
|
import { getFormattedDate } from "./utils/timeutils.js";
|
7
7
|
import { registerAnimatorsImplictly } from "./utils/animationutils.js";
|
8
|
-
import { IUSDExporterExtension } from "./Extension.js";
|
8
|
+
import type { IUSDExporterExtension } from "./Extension.js";
|
9
9
|
import { Behaviour, GameObject } from "../../Component.js";
|
10
10
|
import { WebXR } from "../../webxr/WebXR.js"
|
11
11
|
import { serializable } from "../../../engine/engine_serialization.js";
|
@@ -1,5 +1,5 @@
|
|
1
|
-
import { IUSDExporterExtension } from "../Extension.js";
|
2
|
-
import { IBehaviorElement } from "../extensions/behavior/BehavioursBuilder.js";
|
1
|
+
import type { IUSDExporterExtension } from "../Extension.js";
|
2
|
+
import type { IBehaviorElement } from "../extensions/behavior/BehavioursBuilder.js";
|
3
3
|
import { USDDocument, USDObject, USDWriter, USDZExporterContext } from "../ThreeUSDZExporter.js";
|
4
4
|
import { GameObject } from "../../../Component.js";
|
5
5
|
import { Text } from "../../../ui/Text.js"
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { IUSDExporterExtension } from "../Extension.js";
|
1
|
+
import type { IUSDExporterExtension } from "../Extension.js";
|
2
2
|
import { USDObject, USDZExporterContext } from "../ThreeUSDZExporter.js";
|
3
3
|
import { GameObject } from "../../../Component.js";
|
4
4
|
import { Canvas } from "../../../ui/Canvas.js";
|
@@ -219,6 +219,7 @@
|
|
219
219
|
|
220
220
|
onDisable(): void {
|
221
221
|
window.removeEventListener('visibilitychange', this.visibilityChanged);
|
222
|
+
this._overlay?.stop();
|
222
223
|
this.pause();
|
223
224
|
}
|
224
225
|
|
@@ -416,13 +417,16 @@
|
|
416
417
|
target["material"] = this._videoMaterial;
|
417
418
|
}
|
418
419
|
|
420
|
+
const fieldName = "map";
|
421
|
+
const videoMaterial = this._videoMaterial as any;
|
422
|
+
|
419
423
|
if (!this.targetMaterialProperty) {
|
420
|
-
|
424
|
+
videoMaterial[fieldName] = this._videoTexture;
|
421
425
|
}
|
422
426
|
else {
|
423
427
|
switch (this.targetMaterialProperty) {
|
424
428
|
default:
|
425
|
-
|
429
|
+
videoMaterial[fieldName] = this._videoTexture;
|
426
430
|
break;
|
427
431
|
// doesnt render:
|
428
432
|
// case "emissiveTexture":
|
@@ -816,7 +820,11 @@
|
|
816
820
|
if(vUv.x < 0. || vUv.x > 1. || vUv.y < 0. || vUv.y > 1.)
|
817
821
|
gl_FragColor = vec4(0., 0., 0., 1.);
|
818
822
|
else
|
819
|
-
|
823
|
+
{
|
824
|
+
vec4 texcolor = texture2D(map, vUv);
|
825
|
+
texcolor = LinearTosRGB(texcolor);
|
826
|
+
gl_FragColor = texcolor;
|
827
|
+
}
|
820
828
|
}
|
821
829
|
`
|
822
830
|
}
|
@@ -1,6 +1,6 @@
|
|
1
1
|
import { serializable } from "../../../engine/engine_serialization.js";
|
2
2
|
import { VolumeParameter } from "../VolumeParameter.js";
|
3
|
-
import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
3
|
+
import { type EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect.js";
|
4
4
|
import { registerCustomEffectType } from "../VolumeProfile.js";
|
5
5
|
import { VignetteEffect } from "postprocessing";
|
6
6
|
|
@@ -1,487 +1,215 @@
|
|
1
|
-
|
2
1
|
import { Behaviour } from "./Component.js";
|
3
|
-
import
|
4
|
-
import {
|
5
|
-
import
|
6
|
-
import {
|
7
|
-
import
|
2
|
+
import { StreamEndedEvent, NetworkedStreamEvents, NetworkedStreams, StreamReceivedEvent, disposeStream } from "../engine/engine_networking_streams.js"
|
3
|
+
import { serializable } from "../engine/engine_serialization_decorator.js";
|
4
|
+
import { getParam, microphonePermissionsGranted } from "../engine/engine_utils.js";
|
5
|
+
import { RoomEvents } from "../engine/engine_networking.js";
|
6
|
+
import { delay } from "../engine/engine_utils.js";
|
7
|
+
import { isDevEnvironment, showBalloonError, showBalloonWarning } from "../engine/debug/index.js";
|
8
8
|
import { AudioAnalyser } from "three";
|
9
|
-
import { SendQueue } from "../engine/engine_networking_types.js";
|
10
|
-
import { getPeerjsInstance } from "../engine/engine_networking_peer.js";
|
11
9
|
|
12
10
|
export const noVoip = "noVoip";
|
13
|
-
const
|
14
|
-
const allowVoip = utils.getParam("voip");
|
11
|
+
const debugParam = getParam("debugvoip");
|
15
12
|
|
16
|
-
|
17
|
-
Update_ID = "peer-update-id",
|
18
|
-
}
|
13
|
+
export class Voip extends Behaviour {
|
19
14
|
|
20
|
-
|
21
|
-
|
15
|
+
/** When enabled VOIP will start when this component becomes enabled */
|
16
|
+
@serializable()
|
17
|
+
autoConnect: boolean = false;
|
22
18
|
|
23
|
-
|
24
|
-
|
25
|
-
}
|
26
|
-
}
|
19
|
+
@serializable()
|
20
|
+
runInBackground: boolean = true;
|
27
21
|
|
28
|
-
|
29
|
-
id: string; // user id
|
30
|
-
peerId: string; // peer id
|
31
|
-
}
|
22
|
+
debug: boolean = false;
|
32
23
|
|
33
|
-
|
34
|
-
None = 0,
|
35
|
-
Errors = 1,
|
36
|
-
ErrorsAndWarnings = 2,
|
37
|
-
All = 3
|
38
|
-
}
|
24
|
+
private _net!: NetworkedStreams;
|
39
25
|
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
this.voip = voip;
|
51
|
-
this.peer = peer;
|
52
|
-
this.userId = userId;
|
53
|
-
this.peerId = peerId;
|
26
|
+
awake() {
|
27
|
+
if (debugParam) this.debug = true;
|
28
|
+
if (this.debug) {
|
29
|
+
window.addEventListener("keydown", async (evt) => {
|
30
|
+
if (evt.key === "v") {
|
31
|
+
console.log("MUTE?", !this.isMuted)
|
32
|
+
this.setMuted(!this.isMuted);
|
33
|
+
}
|
34
|
+
});
|
35
|
+
}
|
54
36
|
}
|
55
37
|
|
56
|
-
|
57
|
-
if (
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
63
|
-
this.
|
64
|
-
|
65
|
-
|
38
|
+
onEnable(): void {
|
39
|
+
if (!this._net) this._net = NetworkedStreams.create(this);
|
40
|
+
// this._net.debug = this.debug;
|
41
|
+
//@ts-ignore
|
42
|
+
this._net.addEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
|
43
|
+
//@ts-ignore
|
44
|
+
this._net.addEventListener(NetworkedStreamEvents.StreamEnded, this.onStreamEnded)
|
45
|
+
this._net.enable();
|
46
|
+
if (this.autoConnect) {
|
47
|
+
if (this.context.connection.isConnected)
|
48
|
+
this.connect();
|
49
|
+
else {
|
50
|
+
this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
51
|
+
}
|
52
|
+
}
|
53
|
+
this.onEnabledChanged();
|
54
|
+
|
55
|
+
window.addEventListener("visibilitychange", this.onVisibilityChanged);
|
66
56
|
}
|
57
|
+
onDisable(): void {
|
58
|
+
this._net.stopSendingStream(this._outputStream);
|
59
|
+
//@ts-ignore
|
60
|
+
this._net.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
|
61
|
+
//@ts-ignore
|
62
|
+
this._net.removeEventListener(NetworkedStreamEvents.StreamEnded, this.onStreamEnded)
|
63
|
+
this._net?.disable();
|
64
|
+
this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
65
|
+
this.onEnabledChanged();
|
67
66
|
|
68
|
-
|
69
|
-
if (!this.stream) return;
|
70
|
-
const tracks = this.stream?.getAudioTracks();
|
71
|
-
for (const track of tracks) {
|
72
|
-
track.enabled = !mute;
|
73
|
-
}
|
67
|
+
window.removeEventListener("visibilitychange", this.onVisibilityChanged);
|
74
68
|
}
|
75
69
|
|
76
|
-
|
70
|
+
private _outputStream: MediaStream | null = null;
|
77
71
|
|
78
|
-
|
79
|
-
if (!res) {
|
80
|
-
console.warn("no permission to use microphone, can not start call");
|
81
|
-
return;
|
82
|
-
}
|
72
|
+
get isSending() { return this._outputStream != null && this._outputStream.active; }
|
83
73
|
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
74
|
+
/** Start sending audio */
|
75
|
+
async connect(audioSource?: MediaTrackConstraints) {
|
76
|
+
if (!this.context.connection.isConnected) {
|
77
|
+
console.error("Cannot connect to voice chat - not connected to server");
|
78
|
+
return false;
|
88
79
|
}
|
89
|
-
|
90
|
-
console.error(
|
91
|
-
return;
|
80
|
+
else if (!await microphonePermissionsGranted()) {
|
81
|
+
console.error("Cannot connect to voice chat - microphone permissions not granted");
|
82
|
+
return false;
|
92
83
|
}
|
93
|
-
this.updateMute(this.voip.muteOutput);
|
94
|
-
if (debug)
|
95
|
-
console.log(this.stream)
|
96
|
-
this.call = this.peer.call(this.peerId, this.stream, { metadata: { userId: this.userId } });
|
97
|
-
this.call.on("error", err => {
|
98
|
-
console.error(err);
|
99
|
-
});
|
100
|
-
this.call.on("stream", remoteStream => {
|
101
|
-
if (debug)
|
102
|
-
console.log("received stream from remote again", remoteStream);
|
103
|
-
// const ic = new AudioConnection(this.voip.gameObject, this.call).openAudioStream(remoteStream);
|
104
|
-
});
|
105
|
-
//@ts-ignore - ignore overload error
|
106
|
-
this.peer.on("close", this.onCallClose.bind(this));
|
107
84
|
|
108
|
-
this.
|
109
|
-
|
110
|
-
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
this.stream = null;
|
85
|
+
this._net.stopSendingStream(this._outputStream);
|
86
|
+
disposeStream(this._outputStream);
|
87
|
+
this._outputStream = await this.getAudioStream(audioSource);
|
88
|
+
if (this._outputStream) {
|
89
|
+
this._net.startSendingStream(this._outputStream);
|
90
|
+
return true;
|
91
|
+
}
|
92
|
+
else {
|
93
|
+
if (!await microphonePermissionsGranted()) {
|
94
|
+
showBalloonError("Microphone permissions not granted: Please grant microphone permissions to use voice chat");
|
119
95
|
}
|
120
|
-
else
|
121
|
-
console.error(err)
|
96
|
+
else console.error("VOIP: Could not get audio stream - please make sure to connect an audio device and grant microphone permissions");
|
122
97
|
}
|
123
|
-
|
124
|
-
this.peer.on("error", this.callErrorListener);
|
98
|
+
return false;
|
125
99
|
}
|
126
100
|
|
127
|
-
|
128
|
-
|
129
|
-
|
101
|
+
/** Stop sending audio */
|
102
|
+
disconnect() {
|
103
|
+
this._net.stopSendingStream(this._outputStream);
|
104
|
+
disposeStream(this._outputStream);
|
105
|
+
this._outputStream = null;
|
130
106
|
}
|
131
|
-
}
|
132
107
|
|
133
|
-
|
134
|
-
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
public get currentAudio(): THREE.Audio | null {
|
139
|
-
return this.audio;
|
140
|
-
}
|
141
|
-
|
142
|
-
public get currentAnalyzer(): THREE.AudioAnalyser | null { return this.analyzer; }
|
143
|
-
|
144
|
-
private voip: Voip;
|
145
|
-
private call: MediaConnection;
|
146
|
-
private audio: THREE.Audio | null = null;
|
147
|
-
private stream: MediaStream | null = null;
|
148
|
-
private obj: THREE.Object3D;
|
149
|
-
private analyzer: THREE.AudioAnalyser | null = null;
|
150
|
-
|
151
|
-
private waitingForStart: boolean = false;
|
152
|
-
private closed: boolean = false;
|
153
|
-
private audioElement: HTMLAudioElement | null = null;
|
154
|
-
|
155
|
-
public constructor(voip: Voip, obj: THREE.Object3D, call: MediaConnection) {
|
156
|
-
this.voip = voip;
|
157
|
-
this.obj = obj;
|
158
|
-
this.call = call;
|
159
|
-
}
|
160
|
-
|
161
|
-
|
162
|
-
public openAudioStream(stream: MediaStream) {
|
163
|
-
const tracks = stream.getAudioTracks();
|
164
|
-
for (const track of tracks) {
|
165
|
-
if (track.kind === "audio" && track.readyState === "live") {
|
166
|
-
this.open(track);
|
167
|
-
return;
|
108
|
+
setMuted(mute: boolean) {
|
109
|
+
const audio = this._outputStream?.getAudioTracks();
|
110
|
+
if (audio) {
|
111
|
+
for (const track of audio) {
|
112
|
+
track.enabled = !mute
|
168
113
|
}
|
169
114
|
}
|
170
|
-
console.warn("failed finding valid audio stream to begin call");
|
171
115
|
}
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
if (
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
if (debug)
|
180
|
-
console.log("Incoming call, waiting for user interaction before opening audio");
|
116
|
+
get isMuted() {
|
117
|
+
if (this._outputStream === null) return false;
|
118
|
+
const audio = this._outputStream?.getAudioTracks();
|
119
|
+
if (audio) {
|
120
|
+
for (const track of audio) {
|
121
|
+
if (!track.enabled) return true;
|
122
|
+
}
|
181
123
|
}
|
182
|
-
|
183
|
-
if (this.call.open && !this.closed) {
|
184
|
-
if (debug)
|
185
|
-
console.log("Setup audio and begin listening");
|
186
|
-
|
187
|
-
// console.log(track);
|
188
|
-
this.stream = new MediaStream([track as MediaStreamTrack]);
|
189
|
-
|
190
|
-
|
191
|
-
// three does not work?
|
192
|
-
const listener = new THREE.AudioListener();
|
193
|
-
this.audio = new THREE.Audio(listener);
|
194
|
-
this.audio.setVolume(this.voip.muteInput ? 0 : 1);
|
195
|
-
|
196
|
-
// this.stream = track as MediaStream;
|
197
|
-
// if (!this.stream)
|
198
|
-
this.audio.setMediaStreamSource(this.stream);
|
199
|
-
// this.obj.add(listener);
|
200
|
-
// this.obj.add(this.audio);
|
201
|
-
|
202
|
-
// stream only plays if we create this audio element too
|
203
|
-
const audio: HTMLAudioElement = document.createElement('audio');
|
204
|
-
this.audioElement = audio;
|
205
|
-
audio.style.display = "none";
|
206
|
-
document.body.appendChild(audio);
|
207
|
-
audio.srcObject = this.stream;
|
208
|
-
if (audio["sinkId"] !== undefined) {
|
209
|
-
// select speaker output for mobile devices
|
210
|
-
navigator.mediaDevices.enumerateDevices().then(devices => {
|
211
|
-
if (!audio) return;
|
212
|
-
console.log(devices);
|
213
|
-
for (const dev of devices) {
|
214
|
-
if (dev.label === "Speakerphone") {
|
215
|
-
audio["sinkId"] = dev.deviceId;
|
216
|
-
break;
|
217
|
-
}
|
218
|
-
}
|
219
|
-
});
|
220
|
-
}
|
221
|
-
// audio.play();
|
222
|
-
|
223
|
-
// this.audio.setMediaElementSource(audio);
|
224
|
-
if (debug)
|
225
|
-
console.log("call is setup, you should hear something now");
|
226
|
-
|
227
|
-
this.analyzer = new AudioAnalyser(this.audio, 32);
|
228
|
-
|
229
|
-
// const context = this.audio.context;
|
230
|
-
// context.audioWorklet.addModule('./include/Voip_Volume.js').then(() => {
|
231
|
-
// const samplingNode = new AudioWorkletNode(context, "white-noise-processor");
|
232
|
-
// samplingNode.connect(context.destination);
|
233
|
-
// }).catch(err => {
|
234
|
-
// console.error(err);
|
235
|
-
// });
|
236
|
-
}
|
237
|
-
});
|
124
|
+
return false;
|
238
125
|
}
|
239
126
|
|
240
|
-
|
241
|
-
this.closed = true;
|
242
|
-
if (this.call?.open)
|
243
|
-
this.call.close();
|
244
|
-
this.audio?.disconnect();
|
245
|
-
this.stream?.getTracks().forEach(track => { track.stop(); });
|
246
|
-
this.stream = null;
|
247
|
-
if (this.audioElement)
|
248
|
-
this.audioElement.remove();
|
249
|
-
}
|
250
|
-
}
|
127
|
+
// private _analyzer?: AudioAnalyser;
|
251
128
|
|
252
|
-
|
253
|
-
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
if (value === this._inputMuted) return;
|
259
|
-
this._inputMuted = value;
|
260
|
-
if (!this.currentIncomingCalls) return;
|
261
|
-
const vol = this._inputMuted ? 0 : 1;
|
262
|
-
for (const cur in this.currentIncomingCalls) {
|
263
|
-
const call = this.currentIncomingCalls[cur];
|
264
|
-
call?.currentAudio?.setVolume(vol);
|
129
|
+
/** @deprecated */
|
130
|
+
public getFrequency(_userId: string | null): number | null {
|
131
|
+
if (!this["unsupported_getfrequency"]) {
|
132
|
+
this["unsupported_getfrequency"] = true;
|
133
|
+
if (isDevEnvironment()) showBalloonWarning("VOIP: getFrequency is currently not supported");
|
134
|
+
console.warn("VOIP: getFrequency is currently not supported");
|
265
135
|
}
|
136
|
+
// null is get the first with some data
|
137
|
+
// if (userId === null) {
|
138
|
+
// for (const c in this._incomingStreams) {
|
139
|
+
// const call = this._incomingStreams[c];
|
140
|
+
// if (call && call.currentAnalyzer) return call.currentAnalyzer.getAverageFrequency();
|
141
|
+
// }
|
142
|
+
// return null;
|
143
|
+
// }
|
144
|
+
// const call = this._incomingStreams.get(userId);
|
145
|
+
// if (call && call.currentAnalyzer) return call.currentAnalyzer.getAverageFrequency();
|
146
|
+
return null;
|
266
147
|
}
|
267
|
-
get muteInput(): boolean {
|
268
|
-
return this._inputMuted;
|
269
|
-
}
|
270
148
|
|
271
|
-
|
272
|
-
if (
|
273
|
-
|
274
|
-
|
275
|
-
for (const cur in this.connections) {
|
276
|
-
const call = this.connections[cur];
|
277
|
-
call?.updateMute(value);
|
149
|
+
private async getAudioStream(audio?: MediaTrackConstraints) {
|
150
|
+
if (!navigator.mediaDevices.getUserMedia) {
|
151
|
+
console.error("No getDisplayMedia support");
|
152
|
+
return null;
|
278
153
|
}
|
154
|
+
|
155
|
+
const myStream = await navigator.mediaDevices.getUserMedia({ audio: audio ?? true, video: false })
|
156
|
+
.catch((err) => {
|
157
|
+
console.warn("VOIP failed getting audio stream", err);
|
158
|
+
return null;
|
159
|
+
});
|
160
|
+
return myStream;
|
279
161
|
}
|
280
|
-
get muteOutput(): boolean {
|
281
|
-
return this._outputMuted;
|
282
|
-
}
|
283
162
|
|
284
|
-
|
285
|
-
|
286
|
-
|
287
|
-
|
288
|
-
|
289
|
-
|
290
|
-
}
|
291
|
-
return null;
|
163
|
+
// we have to wait for the user to connect to a room when "auto connect" is enabled
|
164
|
+
private onJoinedRoom = async () => {
|
165
|
+
// Wait a moment for user list to be populated
|
166
|
+
await delay(300)
|
167
|
+
if (this.autoConnect && !this.isSending) {
|
168
|
+
this.connect();
|
292
169
|
}
|
293
|
-
const call = this.currentIncomingCalls[userId];
|
294
|
-
if (call && call.currentAnalyzer) return call.currentAnalyzer.getAverageFrequency();
|
295
|
-
return null;
|
296
170
|
}
|
297
171
|
|
298
|
-
private
|
299
|
-
private model: PeerModel | null = null;
|
300
|
-
private connections: { [key: string]: PeerConnection | null } = {};
|
301
|
-
private currentIncomingCalls: { [key: string]: AudioConnection | null } = {};
|
172
|
+
private _incomingStreams: Map<string, HTMLAudioElement> = new Map();
|
302
173
|
|
303
|
-
private
|
304
|
-
|
174
|
+
private onReceiveStream = (evt: StreamReceivedEvent) => {
|
175
|
+
const userId = evt.target.userId;
|
176
|
+
const stream = evt.stream;
|
305
177
|
|
306
|
-
|
307
|
-
if (
|
308
|
-
|
309
|
-
|
178
|
+
const existing = this._incomingStreams.get(userId);
|
179
|
+
if (existing) {
|
180
|
+
existing.srcObject = stream;
|
181
|
+
existing.setAttribute("autoplay", "true");
|
310
182
|
}
|
311
|
-
|
312
|
-
|
313
|
-
|
314
|
-
|
183
|
+
else {
|
184
|
+
const element = new Audio()
|
185
|
+
this._incomingStreams.set(userId, element);
|
186
|
+
element.setAttribute("autoplay", "true");
|
187
|
+
element.srcObject = stream;
|
315
188
|
}
|
316
|
-
|
317
|
-
if (utils.isiOS() && utils.isSafari()) {
|
318
|
-
console.log("VOIP is currently not supported on Safari iOS")
|
319
|
-
return;
|
320
|
-
}
|
321
|
-
|
322
|
-
this.peer = getPeerjsInstance();
|
323
|
-
navigator["getUserMedia"] = (navigator["getUserMedia"] || navigator["webkitGetUserMedia"] || navigator["mozGetUserMedia"] || navigator["msGetUserMedia"]);
|
324
|
-
|
325
|
-
|
326
|
-
// navigator.mediaDevices.enumerateDevices().then(console.log);
|
327
|
-
|
328
|
-
this.context.connection.beginListen(RoomEvents.JoinedRoom, _evt => {
|
329
|
-
// request mic once
|
330
|
-
navigator.mediaDevices.getUserMedia({ audio: true, video: false }).catch(err => {
|
331
|
-
console.error("Error initializing VoIP connection.", err);
|
332
|
-
});
|
333
|
-
});
|
334
|
-
|
335
|
-
this.context.connection.beginListen(PeerMessage.Update_ID, (cb: IPeerUpdateResponse) => {
|
336
|
-
if (cb.id !== this.context.connection.connectionId) {
|
337
|
-
const prevConnection = this.connections[cb.id];
|
338
|
-
if (prevConnection) {
|
339
|
-
prevConnection.close();
|
340
|
-
}
|
341
|
-
if (this.peer && this.context.connection.connectionId) {
|
342
|
-
const newConnection = new PeerConnection(this, this.peer, this.context.connection.connectionId, cb.peerId);
|
343
|
-
this.connections[cb.id] = newConnection;
|
344
|
-
newConnection.startVoipCall();
|
345
|
-
}
|
346
|
-
}
|
347
|
-
});
|
348
|
-
this.context.connection.beginListen(RoomEvents.UserLeftRoom, evt => {
|
349
|
-
const { userId: id } = evt;
|
350
|
-
const activeConnection = this.connections[id];
|
351
|
-
this.connections[id] = null;
|
352
|
-
if (activeConnection) {
|
353
|
-
activeConnection.close();
|
354
|
-
}
|
355
|
-
const incoming = this.currentIncomingCalls[id];
|
356
|
-
if (debug)
|
357
|
-
console.log("UserLeftRoom", evt, id, incoming);
|
358
|
-
if (incoming) {
|
359
|
-
incoming.close();
|
360
|
-
this.currentIncomingCalls[id] = null;
|
361
|
-
}
|
362
|
-
});
|
363
|
-
|
364
|
-
this.peer.on('open', this.onOpenPeerConnection.bind(this));
|
365
189
|
}
|
366
190
|
|
367
|
-
|
368
|
-
|
369
|
-
|
370
|
-
|
371
|
-
// }
|
191
|
+
private onStreamEnded = (evt: StreamEndedEvent) => {
|
192
|
+
const existing = this._incomingStreams.get(evt.userId);
|
193
|
+
disposeStream(existing?.srcObject as MediaStream);
|
194
|
+
this._incomingStreams.delete(evt.userId);
|
372
195
|
}
|
373
196
|
|
374
|
-
|
375
|
-
|
376
|
-
|
377
|
-
|
378
|
-
const call = this.currentIncomingCalls[key];
|
379
|
-
call?.close();
|
380
|
-
const con = this.connections[key];
|
381
|
-
con?.close();
|
382
|
-
}
|
383
|
-
catch (err) {
|
384
|
-
console.error(err);
|
385
|
-
}
|
197
|
+
private onEnabledChanged = () => {
|
198
|
+
for (const key of this._incomingStreams) {
|
199
|
+
const element = key[1];
|
200
|
+
element.muted = !this.enabled;
|
386
201
|
}
|
387
202
|
}
|
388
203
|
|
389
|
-
// update() {
|
390
|
-
// if (this.context.time.frameCount % 20 !== 0) return;
|
391
|
-
// for (const c in this.currentIncomingCalls) {
|
392
|
-
// const call = this.currentIncomingCalls[c];
|
393
|
-
// if (!call || !call.currentAnalyzer) continue;
|
394
|
-
// const vol = call.currentAnalyzer.getAverageFrequency();
|
395
|
-
// if (vol !== 0) {
|
396
204
|
|
397
|
-
|
398
|
-
|
399
|
-
|
400
|
-
|
401
|
-
|
402
|
-
|
403
|
-
|
404
|
-
|
405
|
-
console.log("Peer connection established and received id");
|
406
|
-
|
407
|
-
this.model = new PeerModel(id);
|
408
|
-
this.context.connection.send(PeerMessage.Update_ID, this.model, SendQueue.OnRoomJoin);
|
409
|
-
|
410
|
-
if (this.peer) {
|
411
|
-
this.peer.on('call', this.onReceiveCall.bind(this));
|
412
|
-
|
413
|
-
this.peer.on('connection', function (conn) {
|
414
|
-
if (debug)
|
415
|
-
console.log("CONNECTION", conn);
|
416
|
-
conn.on('data', function (data) {
|
417
|
-
if (debug)
|
418
|
-
console.log('Received', data);
|
419
|
-
});
|
420
|
-
});
|
205
|
+
private onVisibilityChanged = () => {
|
206
|
+
if (this.runInBackground) return;
|
207
|
+
const visible = document.visibilityState === "visible";
|
208
|
+
const muted = !visible;
|
209
|
+
this.setMuted(muted);
|
210
|
+
for (const key of this._incomingStreams) {
|
211
|
+
const str = key[1];
|
212
|
+
str.muted = muted;
|
421
213
|
}
|
422
|
-
}
|
423
|
-
|
424
|
-
private async onReceiveCall(call) {
|
425
|
-
if (!call) return;
|
426
|
-
|
427
|
-
const { metadata } = call;
|
428
|
-
console.assert(metadata.userId);
|
429
|
-
const { userId } = metadata;
|
430
|
-
const { peer: peerId } = call;
|
431
|
-
const currentCall = this.currentIncomingCalls[userId];
|
432
|
-
if (currentCall) {
|
433
|
-
currentCall.close();
|
434
|
-
}
|
435
|
-
if (debug)
|
436
|
-
console.log("received call");
|
437
|
-
|
438
|
-
// if we have mic permissions we can answer with our own mic
|
439
|
-
if (await Voip.HasMicrophonePermissions()) {
|
440
|
-
try {
|
441
|
-
const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
|
442
|
-
call.answer(stream);
|
443
|
-
}
|
444
|
-
catch (err) {
|
445
|
-
console.error("Error initializing VoIP connection.", err);
|
446
|
-
}
|
447
|
-
}
|
448
|
-
// otherwise take the call but dont send any audio ourselves
|
449
|
-
else call.answer(null);
|
450
|
-
|
451
|
-
this.currentIncomingCalls[userId] = new AudioConnection(this, this.gameObject, call);
|
452
|
-
|
453
|
-
// let done = false;
|
454
|
-
call.on('stream', remoteStream => {
|
455
|
-
if (debug)
|
456
|
-
console.log("receive caller stream, will setup audio now");
|
457
|
-
this.currentIncomingCalls[userId]?.openAudioStream(remoteStream);
|
458
|
-
});
|
459
|
-
call.on("error", console.error);
|
460
214
|
};
|
461
|
-
|
462
|
-
// update(): void {
|
463
|
-
// for (const k in this.currentIncomingCalls) {
|
464
|
-
// const currentCall = this.currentIncomingCalls[k];
|
465
|
-
// if (currentCall && currentCall.currentAnalyzer) {
|
466
|
-
// console.log(currentCall.currentAnalyzer.getAverageFrequency());
|
467
|
-
// // // const c = currentCall.currentAudio.getOutput();
|
468
|
-
// // console.log(c, c.gain.value);
|
469
|
-
// }
|
470
|
-
// }
|
471
|
-
// }
|
472
|
-
|
473
|
-
public static async HasMicrophonePermissions(): Promise<boolean> {
|
474
|
-
try {
|
475
|
-
//@ts-ignore
|
476
|
-
const res = await navigator.permissions.query({ name: 'microphone' });
|
477
|
-
if (res.state === "denied") {
|
478
|
-
return false;
|
479
|
-
}
|
480
|
-
return true;
|
481
|
-
}
|
482
|
-
catch (err) {
|
483
|
-
console.error("Error querying `microphone` permissions.", err);
|
484
|
-
return false;
|
485
|
-
}
|
486
|
-
}
|
487
|
-
}
|
215
|
+
}
|
@@ -2,7 +2,7 @@
|
|
2
2
|
import { serializeable } from "../../engine/engine_serialization_decorator.js";
|
3
3
|
import { getParam } from "../../engine/engine_utils.js";
|
4
4
|
import { VolumeProfile } from "./VolumeProfile.js";
|
5
|
-
import { EditorModification, IEditorModification as IEditorModificationReceiver } from "../../engine/engine_editor-sync.js";
|
5
|
+
import type { EditorModification, IEditorModification as IEditorModificationReceiver } from "../../engine/engine_editor-sync.js";
|
6
6
|
import { PostProcessingHandler } from "./PostProcessingHandler.js";
|
7
7
|
import { PostProcessingEffect } from "./PostProcessingEffect.js";
|
8
8
|
import { VolumeParameter } from "./VolumeParameter.js";
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import {
|
1
|
+
import { Color, Euler, EventDispatcher, Group, Matrix4, Mesh, MeshBasicMaterial, Object3D, Quaternion, RingGeometry, Texture, Vector3, type WebXRArrayCamera } from 'three';
|
2
2
|
import { ARButton } from '../../include/three/ARButton.js';
|
3
3
|
import { VRButton } from '../../include/three/VRButton.js';
|
4
4
|
|
@@ -6,7 +6,7 @@
|
|
6
6
|
import { serializable } from "../../engine/engine_serialization_decorator.js";
|
7
7
|
import { XRSessionMode } from "../../engine/engine_setup.js";
|
8
8
|
import { getWorldPosition, getWorldQuaternion, setWorldPosition, setWorldQuaternion } from "../../engine/engine_three_utils.js";
|
9
|
-
import { INeedleEngineComponent } from "../../engine/engine_types.js";
|
9
|
+
import type { INeedleEngineComponent } from "../../engine/engine_types.js";
|
10
10
|
import { getParam, isMozillaXR, isQuest, setOrAddParamsToUrl } from "../../engine/engine_utils.js";
|
11
11
|
|
12
12
|
import { Behaviour, GameObject } from "../Component.js";
|
@@ -15,7 +15,7 @@
|
|
15
15
|
import { ControllerType, WebXRController } from "./WebXRController.js";
|
16
16
|
import { XRRig } from "./WebXRRig.js";
|
17
17
|
import { WebXRSync } from "./WebXRSync.js";
|
18
|
-
import {
|
18
|
+
import { XRState, XRStateFlag } from "../XRFlag.js";
|
19
19
|
import { showBalloonWarning } from '../../engine/debug/index.js';
|
20
20
|
import { isDestroyed } from '../../engine/engine_gameobject.js';
|
21
21
|
|
@@ -1,4 +1,4 @@
|
|
1
|
-
import { BoxHelper, BufferGeometry, Color, Euler, Group, Intersection, Layers, Line, LineBasicMaterial, Material, Mesh, MeshBasicMaterial, Object3D, PerspectiveCamera, Quaternion, Ray, SphereGeometry, Vector2, Vector3 } from "three";
|
1
|
+
import { BoxHelper, BufferGeometry, Color, Euler, Group, type Intersection, Layers, Line, LineBasicMaterial, Material, Mesh, MeshBasicMaterial, Object3D, PerspectiveCamera, Quaternion, Ray, SphereGeometry, Vector2, Vector3 } from "three";
|
2
2
|
import { OculusHandModel } from 'three/examples/jsm/webxr/OculusHandModel.js';
|
3
3
|
import { OculusHandPointerModel } from 'three/examples/jsm/webxr/OculusHandPointerModel.js';
|
4
4
|
import { XRControllerModel, XRControllerModelFactory } from 'three/examples/jsm/webxr/XRControllerModelFactory.js';
|
@@ -4,7 +4,7 @@
|
|
4
4
|
import { Object3D, Vector3 } from "three";
|
5
5
|
import { PlayerColor } from "../PlayerColor.js";
|
6
6
|
import { Context } from "../../engine/engine_setup.js";
|
7
|
-
import { IModel, SendQueue } from "../../engine/engine_networking_types.js";
|
7
|
+
import { type IModel, SendQueue } from "../../engine/engine_networking_types.js";
|
8
8
|
|
9
9
|
enum XRGrabEvent {
|
10
10
|
StartOrUpdate = "xr-grab-visual-start-or-update",
|
@@ -4,7 +4,7 @@
|
|
4
4
|
import { Behaviour, GameObject } from "../Component.js";
|
5
5
|
import { WebXR, WebXREvent } from "./WebXR.js";
|
6
6
|
import { serializable } from "../../engine/engine_serialization.js";
|
7
|
-
import { Vec3 } from "../../engine/engine_types.js";
|
7
|
+
import type { Vec3 } from "../../engine/engine_types.js";
|
8
8
|
import { disposeObjectResources } from "../../engine/engine_assetdatabase.js";
|
9
9
|
import { getParam } from "../../engine/engine_utils.js";
|
10
10
|
|
@@ -1,7 +1,7 @@
|
|
1
1
|
import { Object3D } from "three";
|
2
|
-
import { IGameObject } from "../../engine/engine_types.js";
|
2
|
+
import type { IGameObject } from "../../engine/engine_types.js";
|
3
3
|
import { getParam } from "../../engine/engine_utils.js";
|
4
|
-
import { Behaviour
|
4
|
+
import { Behaviour } from "../Component.js";
|
5
5
|
import { BoxGizmo } from "../Gizmos.js";
|
6
6
|
|
7
7
|
const debug = getParam("debugrig");
|
@@ -0,0 +1,73 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
3
|
+
if (k2 === undefined) k2 = k;
|
4
|
+
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
5
|
+
}) : (function(o, m, k, k2) {
|
6
|
+
if (k2 === undefined) k2 = k;
|
7
|
+
o[k2] = m[k];
|
8
|
+
}));
|
9
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
10
|
+
for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);
|
11
|
+
};
|
12
|
+
exports.__esModule = true;
|
13
|
+
__exportStar(require("./extensions/index.js"), exports);
|
14
|
+
__exportStar(require("./engine_addressables.js"), exports);
|
15
|
+
__exportStar(require("./engine_application.js"), exports);
|
16
|
+
__exportStar(require("./engine_assetdatabase.js"), exports);
|
17
|
+
__exportStar(require("./engine_create_objects.js"), exports);
|
18
|
+
__exportStar(require("./engine_components_internal.js"), exports);
|
19
|
+
__exportStar(require("./engine_components.js"), exports);
|
20
|
+
__exportStar(require("./engine_components_internal.js"), exports);
|
21
|
+
__exportStar(require("./engine_context_registry.js"), exports);
|
22
|
+
__exportStar(require("./engine_context.js"), exports);
|
23
|
+
__exportStar(require("./engine_coroutine.js"), exports);
|
24
|
+
__exportStar(require("./engine_constants.js"), exports);
|
25
|
+
__exportStar(require("./debug/index.js"), exports);
|
26
|
+
__exportStar(require("./engine_element.js"), exports);
|
27
|
+
__exportStar(require("./engine_element_loading.js"), exports);
|
28
|
+
__exportStar(require("./engine_element_attributes.js"), exports);
|
29
|
+
var engine_gizmos_js_1 = require("./engine_gizmos.js");
|
30
|
+
__createBinding(exports, engine_gizmos_js_1, "Gizmos");
|
31
|
+
__exportStar(require("./engine_gltf.js"), exports);
|
32
|
+
__exportStar(require("./engine_hot_reload.js"), exports);
|
33
|
+
__exportStar(require("./engine_gameobject.js"), exports);
|
34
|
+
__exportStar(require("./engine_networking.js"), exports);
|
35
|
+
__exportStar(require("./engine_networking_types.js"), exports);
|
36
|
+
var engine_networking_auto_js_1 = require("./engine_networking_auto.js");
|
37
|
+
__createBinding(exports, engine_networking_auto_js_1, "syncField");
|
38
|
+
__exportStar(require("./engine_networking_files.js"), exports);
|
39
|
+
__exportStar(require("./engine_networking_instantiate.js"), exports);
|
40
|
+
__exportStar(require("./engine_networking_streams.js"), exports);
|
41
|
+
__exportStar(require("./engine_networking_utils.js"), exports);
|
42
|
+
__exportStar(require("./engine_networking_peer.js"), exports);
|
43
|
+
__exportStar(require("./engine_patcher.js"), exports);
|
44
|
+
__exportStar(require("./engine_playerview.js"), exports);
|
45
|
+
__exportStar(require("./engine_physics.js"), exports);
|
46
|
+
__exportStar(require("./engine_physics.types.js"), exports);
|
47
|
+
__exportStar(require("./engine_physics_rapier.js"), exports);
|
48
|
+
__exportStar(require("./engine_scenelighting.js"), exports);
|
49
|
+
__exportStar(require("./engine_input.js"), exports);
|
50
|
+
__exportStar(require("./engine_math.js"), exports);
|
51
|
+
__exportStar(require("./js-extensions/index.js"), exports);
|
52
|
+
__exportStar(require("./engine_scenetools.js"), exports);
|
53
|
+
__exportStar(require("./engine_serialization.js"), exports);
|
54
|
+
var engine_serialization_core_js_1 = require("./engine_serialization_core.js");
|
55
|
+
__createBinding(exports, engine_serialization_core_js_1, "type");
|
56
|
+
__exportStar(require("./engine_texture.js"), exports);
|
57
|
+
__exportStar(require("./engine_three_utils.js"), exports);
|
58
|
+
__exportStar(require("./engine_time.js"), exports);
|
59
|
+
__exportStar(require("./engine_types.js"), exports);
|
60
|
+
__exportStar(require("./engine_utils_screenshot.js"), exports);
|
61
|
+
__exportStar(require("./engine_web_api.js"), exports);
|
62
|
+
__exportStar(require("./engine_utils.js"), exports);
|
63
|
+
var engine_typestore_js_1 = require("./engine_typestore.js");
|
64
|
+
__createBinding(exports, engine_typestore_js_1, "TypeStore");
|
65
|
+
__createBinding(exports, engine_typestore_js_1, "registerType");
|
66
|
+
var engine_instancing_js_1 = require("./engine_instancing.js");
|
67
|
+
__createBinding(exports, engine_instancing_js_1, "InstancingUtil");
|
68
|
+
var engine_util_decorator_js_1 = require("./engine_util_decorator.js");
|
69
|
+
__createBinding(exports, engine_util_decorator_js_1, "validate");
|
70
|
+
__createBinding(exports, engine_util_decorator_js_1, "prefix");
|
71
|
+
var engine_license_js_1 = require("./engine_license.js");
|
72
|
+
__createBinding(exports, engine_license_js_1, "hasProLicense");
|
73
|
+
__createBinding(exports, engine_license_js_1, "hasIndieLicense");
|
@@ -0,0 +1,513 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __extends = (this && this.__extends) || (function () {
|
3
|
+
var extendStatics = function (d, b) {
|
4
|
+
extendStatics = Object.setPrototypeOf ||
|
5
|
+
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
6
|
+
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
7
|
+
return extendStatics(d, b);
|
8
|
+
};
|
9
|
+
return function (d, b) {
|
10
|
+
extendStatics(d, b);
|
11
|
+
function __() { this.constructor = d; }
|
12
|
+
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
13
|
+
};
|
14
|
+
})();
|
15
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
16
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
17
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
18
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
19
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
20
|
+
};
|
21
|
+
exports.__esModule = true;
|
22
|
+
exports.AudioSource = exports.AudioRolloffMode = void 0;
|
23
|
+
var Component_js_1 = require("./Component.js");
|
24
|
+
var PositionalAudioHelper_js_1 = require("three/examples/jsm/helpers/PositionalAudioHelper.js");
|
25
|
+
var AudioListener_js_1 = require("./AudioListener.js");
|
26
|
+
var utils = require("../engine/engine_utils.js");
|
27
|
+
var engine_serialization_decorator_js_1 = require("../engine/engine_serialization_decorator.js");
|
28
|
+
var engine_application_js_1 = require("../engine/engine_application.js");
|
29
|
+
var three_1 = require("three");
|
30
|
+
var debug = utils.getParam("debugaudio");
|
31
|
+
// Distance-attenuation modes for a positional AudioSource; mirrors Unity's
// AudioRolloffMode. Compiled TS enum: numeric values with reverse mapping
// (AudioRolloffMode[0] === "Logarithmic").
var AudioRolloffMode;
(function (AudioRolloffMode) {
    /// <summary>
    /// <para>Use this mode when you want a real-world rolloff.</para>
    /// </summary>
    AudioRolloffMode[AudioRolloffMode["Logarithmic"] = 0] = "Logarithmic";
    /// <summary>
    /// <para>Use this mode when you want to lower the volume of your sound over the distance.</para>
    /// </summary>
    AudioRolloffMode[AudioRolloffMode["Linear"] = 1] = "Linear";
    /// <summary>
    /// <para>Use this when you want to use a custom rolloff.</para>
    /// </summary>
    AudioRolloffMode[AudioRolloffMode["Custom"] = 2] = "Custom";
})(AudioRolloffMode = exports.AudioRolloffMode || (exports.AudioRolloffMode = {}));
|
46
|
+
/**
 * AudioSource component: plays an audio clip (URL string or MediaStream)
 * through a three.js PositionalAudio node attached to this component's
 * gameObject. Playback is deferred until the browser allows audio, i.e.
 * after the first user interaction (pointerdown/click/dragstart/touchstart),
 * tracked via the static `_userInteractionRegistered` machinery.
 *
 * Fix in this revision: `stop()` previously logged the literal "Pause"
 * (copy-paste from `pause()`), which made debug traces misleading; it now
 * logs "Stop". No other behavioral change.
 */
var AudioSource = /** @class */ (function (_super) {
    __extends(AudioSource, _super);
    function AudioSource() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // Serialized fields (see __decorate calls below).
        _this.clip = "";
        _this.playOnAwake = false;
        _this._spatialBlend = 0;
        _this._minDistance = 1;
        _this._maxDistance = 100;
        _this._volume = 1;
        _this.rollOffMode = 0;
        _this.playInBackground = true;
        _this._loop = false;
        // Lazily created three.js PositionalAudio (see the `Sound` getter).
        _this.sound = null;
        // PositionalAudioHelper, only created when `debug` is on.
        _this.helper = null;
        // Remembers play state across tab-visibility changes.
        _this.wasPlaying = false;
        _this.audioLoader = null;
        _this.shouldPlay = false;
        // set this from audio context time, used to set clip offset when setting "time" property
        // there is maybe a better way to set a audio clip current time?!
        _this._lastClipStartedLoading = null;
        _this._audioElement = null;
        // Pauses audio when the tab is hidden (always on mobile, otherwise
        // only when playInBackground is false) and resumes it when visible.
        _this.onVisibilityChanged = function () {
            switch (document.visibilityState) {
                case "hidden":
                    if (_this.playInBackground === false || utils.isMobileDevice()) {
                        _this.wasPlaying = _this.isPlaying;
                        if (_this.isPlaying) {
                            _this.pause();
                        }
                    }
                    break;
                case "visible":
                    if (debug)
                        console.log("visible", _this.enabled, _this.playOnAwake, !_this.isPlaying, AudioSource._userInteractionRegistered, _this.wasPlaying);
                    if (_this.enabled && _this.playOnAwake && !_this.isPlaying && AudioSource._userInteractionRegistered && _this.wasPlaying) {
                        _this.play();
                    }
                    break;
            }
        };
        // Mirrors the application-level mute flag onto the sound node's volume.
        _this.onApplicationMuteChanged = function () {
            var _a, _b;
            if (_this.context.application.muted)
                (_a = _this.sound) === null || _a === void 0 ? void 0 : _a.setVolume(0);
            else
                (_b = _this.sound) === null || _b === void 0 ? void 0 : _b.setVolume(_this.volume);
        };
        // Linear interpolation between x and y by factor a.
        _this.lerp = function (x, y, a) { return x * (1 - a) + y * a; };
        // Callback for AudioLoader: configures the (lazily created) sound node
        // with the decoded buffer once audio playback is allowed.
        _this.createAudio = function (buffer) {
            if (debug)
                console.log("audio buffer loaded");
            AudioSource.registerWaitForAllowAudio(function () {
                if (debug)
                    console.log("finished loading", buffer);
                var sound = _this.Sound;
                if (!sound) {
                    console.warn("Failed getting sound", _this.name);
                    return;
                }
                if (sound.isPlaying)
                    sound.stop();
                if (buffer)
                    sound.setBuffer(buffer);
                sound.loop = _this._loop;
                if (_this.context.application.muted)
                    sound.setVolume(0);
                else
                    sound.setVolume(_this.volume);
                sound.autoplay = _this.shouldPlay;
                // sound.setDistanceModel('linear');
                // sound.setRolloffFactor(1);
                _this.applySpatialDistanceSettings();
                // sound.setDirectionalCone(180, 360, 0.1);
                if (sound.isPlaying)
                    sound.stop();
                if (debug)
                    console.log(_this.name, _this.shouldPlay, AudioSource.userInteractionRegistered, _this);
                if (_this.shouldPlay && AudioSource._userInteractionRegistered)
                    _this.play();
            });
        };
        // Audio-context timestamp captured on pause/stop; used by the `time`
        // getter to compute the current playback position.
        _this._lastContextTime = 0;
        _this._hasEnded = true;
        _this._needUpdateSpatialDistanceSettings = false;
        return _this;
    }
    // True once a user gesture has been seen. Reading this getter also kicks
    // off listening for the first interaction if not already doing so.
    Object.defineProperty(AudioSource, "userInteractionRegistered", {
        get: function () {
            if (!AudioSource._didCallBeginWaitForUserInteraction) {
                AudioSource._didCallBeginWaitForUserInteraction = true;
                AudioSource._beginWaitForUserInteraction();
            }
            return AudioSource._userInteractionRegistered;
        },
        enumerable: false,
        configurable: true
    });
    // Runs `cb` immediately if audio is already allowed, otherwise queues it
    // (deduplicated) until the first user interaction.
    AudioSource.registerWaitForAllowAudio = function (cb) {
        if (cb !== null) {
            if (this._userInteractionRegistered) {
                cb();
                return;
            }
            if (this.callbacks.indexOf(cb) === -1)
                this.callbacks.push(cb);
            if (!AudioSource._didCallBeginWaitForUserInteraction) {
                AudioSource._didCallBeginWaitForUserInteraction = true;
                AudioSource._beginWaitForUserInteraction();
            }
        }
    };
    // Installs one-shot document listeners for the first user gesture; on the
    // first gesture flips _userInteractionRegistered, removes the listeners
    // and flushes all queued callbacks.
    AudioSource._beginWaitForUserInteraction = function (cb) {
        var _this = this;
        if (cb === void 0) { cb = null; }
        if (this._userInteractionRegistered) {
            if (cb)
                cb();
            return;
        }
        if (cb !== null)
            this.registerWaitForAllowAudio(cb);
        var callback = function () {
            // `fn` is the bound listener declared below; `var` hoisting makes it
            // visible here, and it is always assigned before any event can fire.
            if (fn == undefined)
                return;
            if (AudioSource._userInteractionRegistered)
                return;
            AudioSource._userInteractionRegistered = true;
            if (debug)
                console.log("🔊 registered interaction, can play audio now");
            document.removeEventListener('pointerdown', fn);
            document.removeEventListener('click', fn);
            document.removeEventListener('dragstart', fn);
            document.removeEventListener('touchstart', fn);
            for (var _i = 0, _a = _this.callbacks; _i < _a.length; _i++) {
                var cb_1 = _a[_i];
                cb_1();
            }
            _this.callbacks.length = 0;
        };
        var fn = callback.bind(this);
        document.addEventListener('pointerdown', fn);
        document.addEventListener('click', fn);
        document.addEventListener('dragstart', fn);
        document.addEventListener('touchstart', fn);
    };
    // Loop flag; reads through to the sound node when one exists so external
    // changes to the node stay in sync with the serialized field.
    Object.defineProperty(AudioSource.prototype, "loop", {
        get: function () {
            if (this.sound)
                this._loop = this.sound.getLoop();
            return this._loop;
        },
        set: function (val) {
            this._loop = val;
            if (this.sound)
                this.sound.setLoop(val);
        },
        enumerable: false,
        configurable: true
    });
    Object.defineProperty(AudioSource.prototype, "spatialBlend", {
        /** 0 = 2D, 1 = 3D */
        get: function () {
            return this._spatialBlend;
        },
        set: function (val) {
            if (val === this._spatialBlend)
                return;
            this._spatialBlend = val;
            // Applied lazily in update() to coalesce multiple setter calls.
            this._needUpdateSpatialDistanceSettings = true;
        },
        enumerable: false,
        configurable: true
    });
    Object.defineProperty(AudioSource.prototype, "minDistance", {
        get: function () {
            return this._minDistance;
        },
        set: function (val) {
            if (this._minDistance === val)
                return;
            this._minDistance = val;
            this._needUpdateSpatialDistanceSettings = true;
        },
        enumerable: false,
        configurable: true
    });
    Object.defineProperty(AudioSource.prototype, "maxDistance", {
        get: function () {
            return this._maxDistance;
        },
        set: function (val) {
            if (this._maxDistance === val)
                return;
            this._maxDistance = val;
            this._needUpdateSpatialDistanceSettings = true;
        },
        enumerable: false,
        configurable: true
    });
    // Volume is only forwarded to the node while the application is unmuted;
    // onApplicationMuteChanged restores it on unmute.
    Object.defineProperty(AudioSource.prototype, "volume", {
        get: function () { return this._volume; },
        set: function (val) {
            this._volume = val;
            if (this.sound && !this.context.application.muted) {
                if (debug)
                    console.log(this.name, "audio set volume", val);
                this.sound.setVolume(val);
            }
        },
        enumerable: false,
        configurable: true
    });
    // Lazily creates the PositionalAudio node. Requires a user interaction to
    // have happened; finds (or adds to the main camera) an AudioListener first.
    Object.defineProperty(AudioSource.prototype, "Sound", {
        get: function () {
            var _a;
            if (!this.sound && AudioSource._userInteractionRegistered) {
                var listener = (_a = Component_js_1.GameObject.getComponent(this.context.mainCamera, AudioListener_js_1.AudioListener)) !== null && _a !== void 0 ? _a : Component_js_1.GameObject.findObjectOfType(AudioListener_js_1.AudioListener, this.context);
                if (!listener && this.context.mainCamera)
                    listener = Component_js_1.GameObject.addNewComponent(this.context.mainCamera, AudioListener_js_1.AudioListener);
                if (listener === null || listener === void 0 ? void 0 : listener.listener) {
                    this.sound = new three_1.PositionalAudio(listener.listener);
                    this.gameObject.add(this.sound);
                }
                else if (debug)
                    console.warn("No audio listener found in scene - can not play audio");
            }
            return this.sound;
        },
        enumerable: false,
        configurable: true
    });
    Object.defineProperty(AudioSource.prototype, "ShouldPlay", {
        get: function () { return this.shouldPlay; },
        enumerable: false,
        configurable: true
    });
    AudioSource.prototype.awake = function () {
        this.audioLoader = new three_1.AudioLoader();
        if (this.playOnAwake)
            this.shouldPlay = true;
    };
    AudioSource.prototype.onEnable = function () {
        var _this = this;
        if (!AudioSource._userInteractionRegistered) {
            // Defer the first playback until audio is allowed.
            AudioSource._beginWaitForUserInteraction(function () {
                if (_this.enabled && !_this.destroyed && _this.shouldPlay)
                    _this.onNewClip(_this.clip);
            });
        }
        else if (this.playOnAwake && this.context.application.isVisible) {
            this.play();
        }
        globalThis.addEventListener('visibilitychange', this.onVisibilityChanged);
        this.context.application.addEventListener(engine_application_js_1.ApplicationEvents.MuteChanged, this.onApplicationMuteChanged);
    };
    AudioSource.prototype.onDisable = function () {
        globalThis.removeEventListener('visibilitychange', this.onVisibilityChanged);
        this.context.application.removeEventListener(engine_application_js_1.ApplicationEvents.MuteChanged, this.onApplicationMuteChanged);
        this.stop();
    };
    // Pushes min/max distance, spatial blend and rolloff mode to the panner.
    // The ref-distance lerp blows up toward 2D (spatialBlend -> 0), which
    // effectively disables distance attenuation for non-spatial sources.
    AudioSource.prototype.applySpatialDistanceSettings = function () {
        var sound = this.sound;
        if (!sound)
            return;
        this._needUpdateSpatialDistanceSettings = false;
        var dist = this.lerp(10 * this._maxDistance / Math.max(0.0001, this.spatialBlend), this._minDistance, this.spatialBlend);
        if (debug)
            console.log(this.name, this._minDistance, this._maxDistance, this.spatialBlend, "Ref distance=" + dist);
        sound.setRefDistance(dist);
        sound.setMaxDistance(Math.max(0.01, this._maxDistance));
        // https://developer.mozilla.org/en-US/docs/Web/API/PannerNode/distanceModel
        switch (this.rollOffMode) {
            case AudioRolloffMode.Logarithmic:
                sound.setDistanceModel('exponential');
                break;
            case AudioRolloffMode.Linear:
                sound.setDistanceModel('linear');
                break;
            case AudioRolloffMode.Custom:
                // Custom rolloff is not applied here; panner keeps its current model.
                break;
        }
        if (this.spatialBlend > 0) {
            if (debug && !this.helper) {
                this.helper = new PositionalAudioHelper_js_1.PositionalAudioHelper(sound, sound.getRefDistance());
                sound.add(this.helper);
            }
        }
        else if (this.helper && this.helper.parent) {
            this.helper.removeFromParent();
        }
    };
    // Handles a new clip value: URL strings (.mp3/.wav) are loaded via
    // AudioLoader (deduplicating concurrent loads of the same URL); anything
    // non-string (e.g. a MediaStream) goes straight to createAudio.
    AudioSource.prototype.onNewClip = function (clip) {
        if (clip)
            this.clip = clip;
        if (typeof clip === "string") {
            if (debug)
                console.log(clip);
            if (clip.endsWith(".mp3") || clip.endsWith(".wav")) {
                if (!this.audioLoader)
                    this.audioLoader = new three_1.AudioLoader();
                this.shouldPlay = true;
                if (this._lastClipStartedLoading === clip) {
                    if (debug)
                        console.log("Is currently loading:", this._lastClipStartedLoading, this);
                    return;
                }
                this._lastClipStartedLoading = clip;
                if (debug)
                    console.log("load audio", clip);
                this.audioLoader.load(clip, this.createAudio, function () { }, console.error);
            }
            else
                console.warn("Unsupported audio clip type", clip);
        }
        else {
            this.shouldPlay = true;
            this.createAudio();
        }
    };
    /** Play a mediastream */
    AudioSource.prototype.play = function (clip) {
        var _a, _b;
        if (clip === void 0) { clip = undefined; }
        // We only support strings and media stream
        // TODO: maybe we should return here if an invalid value is passed in
        if (clip !== undefined && typeof clip !== "string" && !(clip instanceof MediaStream)) {
            console.warn("Called play on AudioSource with unknown argument type", clip);
            clip = undefined;
        }
        // Check if we need to call load first
        var needsLoading = !this.sound || (clip && clip !== this.clip);
        if (typeof clip === "string" && !this.audioLoader)
            needsLoading = true;
        if (clip instanceof MediaStream || typeof clip === "string")
            this.clip = clip;
        if (needsLoading) {
            this.shouldPlay = true;
            this.onNewClip(clip);
            return;
        }
        this.shouldPlay = true;
        this._hasEnded = false;
        if (debug)
            console.log("play", (_a = this.sound) === null || _a === void 0 ? void 0 : _a.getVolume(), this.sound);
        if (this.sound && !this.sound.isPlaying) {
            var muted = this.context.application.muted;
            if (muted)
                this.sound.setVolume(0);
            if (this.clip instanceof MediaStream) {
                // We have to set the audio element source to the mediastream as well
                // otherwise it will not play for some reason...
                this.sound.setMediaStreamSource(this.clip);
                if (!this._audioElement) {
                    this._audioElement = document.createElement('audio');
                    this._audioElement.style.display = "none";
                }
                if (!this._audioElement.parentNode)
                    (_b = this.context.domElement.shadowRoot) === null || _b === void 0 ? void 0 : _b.append(this._audioElement);
                this._audioElement.srcObject = this.clip;
                this._audioElement.autoplay = false;
            }
            else {
                if (this._audioElement)
                    this._audioElement.remove();
                // NOTE(review): `.1` is passed as the play delay when muted —
                // presumably to avoid a click; confirm intent.
                this.sound.play(muted ? .1 : 0);
            }
        }
    };
    // Pauses playback, remembering the audio-context time so `time` keeps
    // reporting the correct position.
    AudioSource.prototype.pause = function () {
        var _a, _b;
        if (debug)
            console.log("Pause", this);
        this._hasEnded = true;
        this.shouldPlay = false;
        if (this.sound && this.sound.isPlaying && this.sound.source) {
            this._lastContextTime = (_a = this.sound) === null || _a === void 0 ? void 0 : _a.context.currentTime;
            this.sound.pause();
        }
        (_b = this._audioElement) === null || _b === void 0 ? void 0 : _b.remove();
    };
    AudioSource.prototype.stop = function () {
        var _a, _b;
        if (debug)
            // Fixed: previously logged "Pause" (copy-paste from pause()).
            console.log("Stop", this);
        this._hasEnded = true;
        this.shouldPlay = false;
        if (this.sound && this.sound.source) {
            this._lastContextTime = (_a = this.sound) === null || _a === void 0 ? void 0 : _a.context.currentTime;
            if (debug)
                console.log(this._lastContextTime);
            this.sound.stop();
        }
        (_b = this._audioElement) === null || _b === void 0 ? void 0 : _b.remove();
    };
    Object.defineProperty(AudioSource.prototype, "isPlaying", {
        get: function () { var _a, _b; return (_b = (_a = this.sound) === null || _a === void 0 ? void 0 : _a.isPlaying) !== null && _b !== void 0 ? _b : false; },
        // Setter intentionally ignores writes (kept for serialization compat).
        set: function (_) { },
        enumerable: false,
        configurable: true
    });
    // Current playback position in seconds; setting it restarts playback at
    // the new offset if the clip was playing.
    Object.defineProperty(AudioSource.prototype, "time", {
        get: function () { var _a, _b; return ((_a = this.sound) === null || _a === void 0 ? void 0 : _a.source) ? (((_b = this.sound.source) === null || _b === void 0 ? void 0 : _b.context.currentTime) - this._lastContextTime + this.sound.offset) : 0; },
        set: function (val) {
            if (this.sound) {
                if (val === this.sound.offset)
                    return;
                var wasPlaying = this.isPlaying;
                this.stop();
                this.sound.offset = val;
                if (wasPlaying)
                    this.play();
            }
        },
        enumerable: false,
        configurable: true
    });
    // Per-frame: keeps the debug helper in sync, applies deferred spatial
    // settings, and emits a synthetic 'ended' event when playback finishes.
    AudioSource.prototype.update = function () {
        if (this.helper) {
            if (this.isPlaying)
                this.helper.update();
            this.helper.visible = this.isPlaying;
        }
        if (this._needUpdateSpatialDistanceSettings) {
            this.applySpatialDistanceSettings();
        }
        if (this.sound && !this.sound.isPlaying && this.shouldPlay && !this._hasEnded) {
            this._hasEnded = true;
            if (debug)
                console.log("Audio clip ended", this.clip);
            this.sound.dispatchEvent({ type: 'ended', target: this });
        }
        // this.gameObject.position.x = Math.sin(time.time) * 2;
        // this.gameObject.position.z = Math.cos(time.time * .5) * 2;
    };
    AudioSource._didCallBeginWaitForUserInteraction = false;
    // Callbacks queued until the first user interaction allows audio.
    AudioSource.callbacks = [];
    AudioSource._userInteractionRegistered = false;
    __decorate([
        engine_serialization_decorator_js_1.serializable(URL)
    ], AudioSource.prototype, "clip");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "playOnAwake");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "loop");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "spatialBlend");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "minDistance");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "maxDistance");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "volume");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "rollOffMode");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], AudioSource.prototype, "playInBackground");
    return AudioSource;
}(Component_js_1.Behaviour));
exports.AudioSource = AudioSource;
|
@@ -0,0 +1,474 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __extends = (this && this.__extends) || (function () {
|
3
|
+
var extendStatics = function (d, b) {
|
4
|
+
extendStatics = Object.setPrototypeOf ||
|
5
|
+
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
6
|
+
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
7
|
+
return extendStatics(d, b);
|
8
|
+
};
|
9
|
+
return function (d, b) {
|
10
|
+
extendStatics(d, b);
|
11
|
+
function __() { this.constructor = d; }
|
12
|
+
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
13
|
+
};
|
14
|
+
})();
|
15
|
+
exports.__esModule = true;
|
16
|
+
exports.disposeStream = exports.NetworkedStreams = exports.PeerHandle = exports.ReceiveStreamEvent = exports.CallEndedEvent = exports.PeerEvent = void 0;
|
17
|
+
var engine_networking_js_1 = require("../engine/engine_networking.js");
|
18
|
+
var engine_networking_peer_js_1 = require("../engine/engine_networking_peer.js");
|
19
|
+
var three_1 = require("three");
|
20
|
+
var engine_utils_js_1 = require("./engine_utils.js");
|
21
|
+
var debug = engine_utils_js_1.getParam("debugnetworkingstreams");
|
22
|
+
// Event names used by PeerHandle / NetworkedStreams, both as EventDispatcher
// event types and as networking message channels. Compiled TS string enum.
var PeerEvent;
(function (PeerEvent) {
    PeerEvent["Connected"] = "peer-user-connected";
    PeerEvent["ReceiveStream"] = "receive-stream";
    PeerEvent["CallEnded"] = "call-ended";
    PeerEvent["Disconnected"] = "peer-user-disconnected";
    PeerEvent["UserJoined"] = "user-joined";
})(PeerEvent = exports.PeerEvent || (exports.PeerEvent = {}));
|
30
|
+
// Event payload dispatched when a call terminates.
var CallEndedEvent = /** @class */ (function () {
    /**
     * @param userId networking user id of the remote participant
     * @param direction CallDirection of the call that ended (incoming/outgoing)
     */
    function CallEndedEvent(userId, direction) {
        // `type` makes instances usable directly with EventDispatcher.dispatchEvent.
        this.type = PeerEvent.CallEnded;
        this.userId = userId;
        this.direction = direction;
    }
    return CallEndedEvent;
}());
exports.CallEndedEvent = CallEndedEvent;
|
39
|
+
// Event payload dispatched when a remote MediaStream is received on a call.
var ReceiveStreamEvent = /** @class */ (function () {
    /**
     * @param stream the incoming MediaStream
     * @param target the CallHandle the stream arrived on
     */
    function ReceiveStreamEvent(stream, target) {
        // `type` makes instances usable directly with EventDispatcher.dispatchEvent.
        this.type = PeerEvent.ReceiveStream;
        this.stream = stream;
        this.target = target;
    }
    return ReceiveStreamEvent;
}());
exports.ReceiveStreamEvent = ReceiveStreamEvent;
|
48
|
+
// Networking message broadcast when our local peer connection opens,
// announcing our peer id to other users (sent on PeerEvent.Connected).
var PeerUserConnectedModel = /** @class */ (function () {
    /**
     * @param handle the local PeerHandle (its id becomes this message's guid)
     * @param peerId the peerjs id we connected with
     */
    function PeerUserConnectedModel(handle, peerId) {
        // internal so server doesnt save it to persistent storage
        this.dontSave = true;
        this.guid = handle.id;
        this.peerId = peerId;
    }
    return PeerUserConnectedModel;
}());
|
57
|
+
// Whether a call was received by us or initiated by us. Module-private
// (not exported). Compiled TS string enum.
var CallDirection;
(function (CallDirection) {
    CallDirection["Incoming"] = "incoming";
    CallDirection["Outgoing"] = "outgoing";
})(CallDirection || (CallDirection = {}));
|
62
|
+
// Wraps a single peerjs MediaConnection, tracking its stream and lifecycle,
// and re-emitting ReceiveStream / CallEnded as three.js EventDispatcher events.
var CallHandle = /** @class */ (function (_super) {
    __extends(CallHandle, _super);
    /**
     * @param userId networking user id of the remote participant
     * @param call the peerjs MediaConnection to wrap
     * @param direction CallDirection.Incoming or .Outgoing
     */
    function CallHandle(userId, call, direction) {
        var _this = _super.call(this) || this;
        _this._stream = null;
        _this._isDisposed = false;
        _this.userId = userId;
        _this.call = call;
        _this.direction = direction;
        _this._stream = null;
        call.on("stream", function (stream) {
            if (debug)
                console.log("Receive video", stream.getAudioTracks(), stream.getVideoTracks());
            _this._stream = stream;
            // Only incoming streams are surfaced as ReceiveStream events; for
            // outgoing calls the stream is our own.
            if (direction === CallDirection.Incoming) {
                var args = new ReceiveStreamEvent(stream, _this);
                _this.dispatchEvent(args);
            }
        });
        call.on("close", function () {
            _this.dispatchEvent(new CallEndedEvent(userId, direction));
        });
        return _this;
    }
    // The remote (incoming) or local (outgoing) MediaStream, once received.
    Object.defineProperty(CallHandle.prototype, "stream", {
        get: function () { return this._stream; },
        enumerable: false,
        configurable: true
    });
    ;
    // Closes the underlying connection and disposes the stream; idempotent.
    CallHandle.prototype.close = function () {
        if (this._isDisposed)
            return;
        this._isDisposed = true;
        this.call.close();
        disposeStream(this._stream);
    };
    Object.defineProperty(CallHandle.prototype, "isOpen", {
        get: function () {
            var _a;
            return ((_a = this.call.peerConnection) === null || _a === void 0 ? void 0 : _a.connectionState) === "connected"; // && this._stream?.active;
        },
        enumerable: false,
        configurable: true
    });
    Object.defineProperty(CallHandle.prototype, "isOpening", {
        get: function () {
            var _a;
            return ((_a = this.call.peerConnection) === null || _a === void 0 ? void 0 : _a.connectionState) === "connecting";
        },
        enumerable: false,
        configurable: true
    });
    // NOTE: isClosed is simply !isOpen, so a still-connecting call reports
    // both isOpening and isClosed.
    Object.defineProperty(CallHandle.prototype, "isClosed", {
        get: function () {
            return !this.isOpen;
        },
        enumerable: false,
        configurable: true
    });
    return CallHandle;
}(three_1.EventDispatcher));
|
124
|
+
/**
 * PeerHandle: one peerjs endpoint per (guid, networking connection). Builds
 * deterministic peer ids from `<id>-<connectionId>` so both sides can derive
 * them without a handshake, answers incoming calls automatically, and
 * re-dispatches CallHandle events at the handle level.
 *
 * Fix in this revision: `registerCall` logged "Missing call metadata" when a
 * call arrived without metadata/userId but then unconditionally dereferenced
 * `meta.userId`, throwing a TypeError for a null `meta`. It now returns
 * `undefined` after the error — backward compatible, since `makeCall` already
 * returns `undefined` on failure and `onPeerReceivingCall` ignores the result.
 */
var PeerHandle = /** @class */ (function (_super) {
    __extends(PeerHandle, _super);
    function PeerHandle(context, id) {
        var _this = _super.call(this) || this;
        _this._incomingCalls = [];
        _this._outgoingCalls = [];
        _this._enabled = false;
        _this._enabledPeer = false;
        // Bound listeners kept as fields so begin/stopListen and on/off get
        // identical function references.
        _this.onConnectRoomFn = _this.onConnectRoom.bind(_this);
        // private onUserJoinedOrLeftRoomFn: Function = this.onUserJoinedOrLeftRoom.bind(this);
        _this.onPeerConnectFn = _this.onPeerConnect.bind(_this);
        _this.onPeerReceiveCallFn = _this.onPeerReceivingCall.bind(_this);
        _this.context = context;
        _this.id = id;
        _this.setupPeer();
        // Legacy getUserMedia shim for older browsers.
        navigator["getUserMedia"] = (navigator["getUserMedia"] || navigator["webkitGetUserMedia"] ||
            navigator["mozGetUserMedia"] || navigator["msGetUserMedia"]);
        return _this;
    }
    // Returns the cached PeerHandle for `guid`, creating it on first use.
    PeerHandle.getOrCreate = function (context, guid) {
        // if (id === undefined) {
        //     // randomId
        //     id = Math.random().toFixed(5);
        // }
        if (PeerHandle.instances.has(guid))
            return PeerHandle.instances.get(guid);
        var peer = new PeerHandle(context, guid);
        PeerHandle.instances.set(guid, peer);
        return peer;
    };
    // Our own peer id, or undefined while not connected to the networking room.
    PeerHandle.prototype.getMyPeerId = function () {
        if (this.context.connection.connectionId)
            return this.getPeerIdFromUserId(this.context.connection.connectionId);
        return undefined;
    };
    PeerHandle.prototype.getPeerIdFromUserId = function (userConnectionId) {
        // we build the peer id ourselves so we dont need to wait for peer to report it
        return this.id + "-" + userConnectionId;
    };
    // Inverse of getPeerIdFromUserId: strips "<id>-" from the front.
    PeerHandle.prototype.getUserIdFromPeerId = function (peerId) {
        return peerId.substring(this.id.length + 1);
    };
    /**
     * Calls `peerId` with `stream`, tagging the call with our connection id
     * so the receiver can identify us.
     * @returns a CallHandle, or undefined if no peer instance exists yet.
     */
    PeerHandle.prototype.makeCall = function (peerId, stream) {
        var _a;
        var opts = { metadata: { userId: this.context.connection.connectionId } };
        var call = (_a = this._peer) === null || _a === void 0 ? void 0 : _a.call(peerId, stream, opts);
        if (call)
            return this.registerCall(call, CallDirection.Outgoing);
        return undefined;
    };
    Object.defineProperty(PeerHandle.prototype, "peer", {
        get: function () { return this._peer; },
        enumerable: false,
        configurable: true
    });
    // private _connectionPeerIdMap : Map<string, string> = new Map();
    // Starts listening for room joins and peer events; idempotent.
    PeerHandle.prototype.enable = function () {
        if (this._enabled)
            return;
        this._enabled = true;
        this.context.connection.beginListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onConnectRoomFn);
        // this.context.connection.beginListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
        // this.context.connection.beginListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
        this.subscribePeerEvents();
    };
    // Stops listening; idempotent counterpart to enable().
    PeerHandle.prototype.disable = function () {
        if (!this._enabled)
            return;
        this._enabled = false;
        this.context.connection.stopListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onConnectRoomFn);
        // this.context.connection.stopListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
        // this.context.connection.stopListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
        this.unsubscribePeerEvents();
    };
    PeerHandle.prototype.onConnectRoom = function () {
        this.setupPeer();
    };
    ;
    // private onUserJoinedOrLeftRoom(_: UserJoinedOrLeftRoomModel): void {
    // };
    // Creates the peerjs instance once a networking connection id is known;
    // guarded so it runs at most once per handle.
    PeerHandle.prototype.setupPeer = function () {
        if (!this.context.connection.connectionId)
            return;
        if (this._enabledPeer)
            return;
        this._enabledPeer = true;
        if (!this._peer) {
            var peerId = this.getMyPeerId();
            if (peerId)
                this._peer = engine_networking_peer_js_1.getPeerjsInstance(peerId);
            else
                console.error("Failed to setup peerjs because we dont have a connection id", this.context.connection.connectionId);
        }
        if (this._enabled)
            this.subscribePeerEvents();
    };
    PeerHandle.prototype.subscribePeerEvents = function () {
        if (!this._peer)
            return;
        this._peer.on("open", this.onPeerConnectFn);
        this._peer.on("call", this.onPeerReceiveCallFn);
        // this.context.connection.beginListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
        // TODO: make connection to all current active calls even if the user is not anymore in the needle room
    };
    PeerHandle.prototype.unsubscribePeerEvents = function () {
        if (!this._peer)
            return;
        this._peer.off("open", this.onPeerConnectFn);
        this._peer.off("call", this.onPeerReceiveCallFn);
        // this.context.connection.stopListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
    };
    // Broadcasts our peer id to the room once the peerjs connection opens.
    PeerHandle.prototype.onPeerConnect = function (id) {
        if (debug)
            console.log("Peer connected as", id);
        this.context.connection.send(PeerEvent.Connected, new PeerUserConnectedModel(this, id));
    };
    // Auto-answers every incoming call (without sending a local stream).
    PeerHandle.prototype.onPeerReceivingCall = function (call) {
        call.answer();
        this.registerCall(call, CallDirection.Incoming);
    };
    /**
     * Wraps `call` in a CallHandle, tracks it in the per-direction list, and
     * forwards its CallEnded / ReceiveStream events to this handle.
     * @returns the CallHandle, or undefined when the call has no usable metadata.
     */
    PeerHandle.prototype.registerCall = function (call, direction) {
        var _this = this;
        var meta = call.metadata;
        if (!meta || !meta.userId) {
            console.error("Missing call metadata", call);
            // Fixed: previously fell through and crashed on `meta.userId` below
            // when `meta` was null/undefined.
            return undefined;
        }
        var userId = meta.userId;
        if (direction === CallDirection.Incoming && debug)
            console.log("Receive call from", call.metadata);
        else if (debug)
            console.log("Make call to", call.metadata);
        var arr = direction === CallDirection.Incoming ? this._incomingCalls : this._outgoingCalls;
        var handle = new CallHandle(userId, call, direction);
        arr.push(handle);
        call.on("error", function (err) {
            console.error("Call error", err);
        });
        call.on("close", function () {
            if (debug)
                console.log("Call ended", call.metadata);
            call.close();
            var index = arr.indexOf(handle);
            if (index !== -1)
                arr.splice(index, 1);
        });
        handle.addEventListener(PeerEvent.CallEnded, function (e) {
            _this.dispatchEvent(e);
        });
        if (direction === CallDirection.Incoming) {
            handle.addEventListener(PeerEvent.ReceiveStream, function (e) {
                _this.dispatchEvent(e);
            });
            call.on("stream", function () {
                // workaround for https://github.com/peers/peerjs/issues/636
                var intervalCounter = 0;
                var closeInterval = setInterval(function () {
                    var isFirstInterval = intervalCounter === 0;
                    if (!handle.isOpen && isFirstInterval) {
                        intervalCounter += 1;
                        clearInterval(closeInterval);
                        handle.close();
                    }
                }, 2000);
            });
        }
        return handle;
    };
    // All PeerHandle instances keyed by guid (see getOrCreate).
    PeerHandle.instances = new Map();
    return PeerHandle;
}(three_1.EventDispatcher));
exports.PeerHandle = PeerHandle;
|
295
|
+
// type UserVideoCall = {
|
296
|
+
// call: Peer.MediaConnection;
|
297
|
+
// stream: MediaStream;
|
298
|
+
// userId: string;
|
299
|
+
// }
|
300
|
+
// type IncomingStreamArgs = {
|
301
|
+
// stream: MediaStream;
|
302
|
+
// userId: string;
|
303
|
+
// }
|
304
|
+
// Manages sending/receiving WebRTC media streams to everyone in the current
// networking room on top of a PeerHandle. Extends three.js EventDispatcher so
// consumers can subscribe to PeerEvent.ReceiveStream / PeerEvent.CallEnded.
var NetworkedStreams = /** @class */ (function (_super) {
    __extends(NetworkedStreams, _super);
    function NetworkedStreams(context, peer) {
        var _this = _super.call(this) || this;
        // Maps each outgoing MediaStream to the CallHandles it is sent over.
        _this._sendingStreams = new Map();
        // When either we ourselves OR someone else joins the room we re-establish
        // all calls so the newcomer starts receiving our streams.
        // https://github.com/needle-tools/needle-tiny/issues/697#issuecomment-1510425539
        _this.onJoinedRoom = function (evt) {
            if (debug)
                console.log(evt.userId + " joined room and I'm currently sending " + _this._sendingStreams.size + " streams");
            if (_this._sendingStreams.size > 0)
                _this.updateSendingCalls();
        };
        // Re-dispatch an incoming stream with our own event payload shape.
        _this.onReceiveStream = function (evt) {
            if (debug)
                console.log("RECEIVE VIDEO", evt);
            _this.dispatchEvent({ type: PeerEvent.ReceiveStream, target: _this, stream: evt.stream, userId: evt.userId });
        };
        _this.onCallEnded = function (evt) {
            _this.dispatchEvent(evt);
        };
        // When our own peer announces itself, start a call to the new peer with
        // the first stream we are currently sending (if any).
        _this.onUserConnected = function (user) {
            if (_this.peer.id === user.guid) {
                if (debug)
                    console.log("USER CONNECTED", user.guid, user);
                var stream = _this._sendingStreams.keys().next().value;
                _this.peer.makeCall(user.peerId, stream);
            }
        };
        // FIX: the unbound prototype method used to be handed to
        // beginListen/stopListen, so `this` was wrong when the callback fired.
        // A bound instance closure gives one stable reference for both
        // subscribe and unsubscribe.
        _this.onUserLeft = function (_) {
            _this.stopCallsToUsersThatAreNotInTheRoomAnymore();
        };
        _this.context = context;
        _this.peer = peer;
        return _this;
    }
    // Convenience factory reusing the per-connection PeerHandle.
    NetworkedStreams.create = function (comp) {
        var peer = PeerHandle.getOrCreate(comp.context, comp.context.connection.connectionId);
        return new NetworkedStreams(comp.context, peer);
    };
    // Begin streaming `stream` to all users currently in the room (no-op if
    // the stream is already being sent).
    NetworkedStreams.prototype.startSendingStream = function (stream) {
        if (!this._sendingStreams.has(stream)) {
            this._sendingStreams.set(stream, []);
            this.updateSendingCalls();
        }
    };
    // Stop streaming `stream`: close every call carrying it and forget it.
    NetworkedStreams.prototype.stopSendingStream = function (_stream) {
        if (_stream) {
            var calls = this._sendingStreams.get(_stream);
            if (calls) {
                if (debug)
                    console.log("Closing calls", calls);
                for (var i = 0; i < calls.length; i++) {
                    calls[i].close();
                }
            }
            this._sendingStreams.delete(_stream);
            if (calls && debug)
                console.log("Currently sending", this._sendingStreams);
        }
    };
    // Subscribe to peer + room events. Counterpart of disable().
    NetworkedStreams.prototype.enable = function () {
        this.peer.enable();
        this.peer.addEventListener(PeerEvent.ReceiveStream, this.onReceiveStream);
        //@ts-ignore
        this.peer.addEventListener(PeerEvent.CallEnded, this.onCallEnded);
        this.context.connection.beginListen(PeerEvent.Connected, this.onUserConnected);
        this.context.connection.beginListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onJoinedRoom);
        this.context.connection.beginListen(engine_networking_js_1.RoomEvents.UserJoinedRoom, this.onJoinedRoom);
        this.context.connection.beginListen(engine_networking_js_1.RoomEvents.UserLeftRoom, this.onUserLeft);
    };
    // Unsubscribe from peer + room events. Counterpart of enable().
    NetworkedStreams.prototype.disable = function () {
        this.peer.disable();
        this.peer.removeEventListener(PeerEvent.ReceiveStream, this.onReceiveStream);
        //@ts-ignore
        this.peer.removeEventListener(PeerEvent.CallEnded, this.onCallEnded);
        this.context.connection.stopListen(PeerEvent.Connected, this.onUserConnected);
        this.context.connection.stopListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onJoinedRoom);
        this.context.connection.stopListen(engine_networking_js_1.RoomEvents.UserJoinedRoom, this.onJoinedRoom);
        this.context.connection.stopListen(engine_networking_js_1.RoomEvents.UserLeftRoom, this.onUserLeft);
    };
    // Kept for backwards compatibility; the bound instance closure created in
    // the constructor shadows this for event subscriptions.
    NetworkedStreams.prototype.onUserLeft = function (_) {
        this.stopCallsToUsersThatAreNotInTheRoomAnymore();
    };
    // Ensure every other user in the room has an active call for every stream
    // we are sending; then prune calls to users that left.
    NetworkedStreams.prototype.updateSendingCalls = function () {
        var startedNewCall = false;
        var localUserId = this.context.connection.connectionId;
        // FIX: Map.keys() returns an iterator without a `.length` property, so
        // the previous index-based loop over it never executed. Materialize
        // the keys into an array first.
        var streams = Array.from(this._sendingStreams.keys());
        for (var s = 0; s < streams.length; s++) {
            var stream = streams[s];
            var calls = this._sendingStreams.get(stream) || [];
            var users = this.context.connection.usersInRoom();
            for (var u = 0; u < users.length; u++) {
                var userId = users[u];
                if (userId === localUserId)
                    continue;
                var existing = null;
                for (var c = 0; c < calls.length; c++) {
                    if (calls[c].userId === userId) {
                        existing = calls[c];
                        break;
                    }
                }
                // Start a call when none exists yet or the previous one's
                // stream has gone inactive.
                if (!existing || (existing.stream && existing.stream.active === false)) {
                    if (debug)
                        console.log("Starting call to", userId, localUserId);
                    var handle = this.peer.makeCall(this.peer.getPeerIdFromUserId(userId), stream);
                    if (handle) {
                        startedNewCall = true;
                        calls.push(handle);
                    }
                }
            }
            this._sendingStreams.set(stream, calls);
        }
        this.stopCallsToUsersThatAreNotInTheRoomAnymore();
        if (startedNewCall && debug) {
            console.log("Currently sending", this._sendingStreams);
        }
    };
    // Close calls whose remote user is no longer in the room.
    NetworkedStreams.prototype.stopCallsToUsersThatAreNotInTheRoomAnymore = function () {
        // FIX: same Map.keys() iterator issue as in updateSendingCalls — the
        // old loop compared against a missing `.length` and never ran.
        var streams = Array.from(this._sendingStreams.keys());
        for (var s = 0; s < streams.length; s++) {
            var calls = this._sendingStreams.get(streams[s]);
            if (!calls)
                continue;
            // Iterate backwards because we splice while iterating.
            for (var i = calls.length - 1; i >= 0; i--) {
                var call = calls[i];
                if (!this.context.connection.userIsInRoom(call.userId)) {
                    call.close();
                    calls.splice(i, 1);
                }
            }
        }
    };
    return NetworkedStreams;
}(three_1.EventDispatcher));
exports.NetworkedStreams = NetworkedStreams;
|
466
|
+
// Stops every track of the given MediaStream, releasing camera/microphone/
// screen resources. Safe to call with null/undefined.
function disposeStream(str) {
    if (!str)
        return;
    str.getTracks().forEach(function (track) {
        track.stop();
    });
}
|
474
|
+
exports.disposeStream = disposeStream;
|
@@ -0,0 +1,490 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __extends = (this && this.__extends) || (function () {
|
3
|
+
var extendStatics = function (d, b) {
|
4
|
+
extendStatics = Object.setPrototypeOf ||
|
5
|
+
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
6
|
+
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
7
|
+
return extendStatics(d, b);
|
8
|
+
};
|
9
|
+
return function (d, b) {
|
10
|
+
extendStatics(d, b);
|
11
|
+
function __() { this.constructor = d; }
|
12
|
+
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
13
|
+
};
|
14
|
+
})();
|
15
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
16
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
17
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
18
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
19
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
20
|
+
};
|
21
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
22
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
23
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
24
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
25
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
26
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
27
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
28
|
+
});
|
29
|
+
};
|
30
|
+
var __generator = (this && this.__generator) || function (thisArg, body) {
|
31
|
+
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
32
|
+
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
33
|
+
function verb(n) { return function (v) { return step([n, v]); }; }
|
34
|
+
function step(op) {
|
35
|
+
if (f) throw new TypeError("Generator is already executing.");
|
36
|
+
while (_) try {
|
37
|
+
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
38
|
+
if (y = 0, t) op = [op[0] & 2, t.value];
|
39
|
+
switch (op[0]) {
|
40
|
+
case 0: case 1: t = op; break;
|
41
|
+
case 4: _.label++; return { value: op[1], done: false };
|
42
|
+
case 5: _.label++; y = op[1]; op = [0]; continue;
|
43
|
+
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
44
|
+
default:
|
45
|
+
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
46
|
+
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
47
|
+
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
48
|
+
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
49
|
+
if (t[2]) _.ops.pop();
|
50
|
+
_.trys.pop(); continue;
|
51
|
+
}
|
52
|
+
op = body.call(thisArg, _);
|
53
|
+
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
54
|
+
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
55
|
+
}
|
56
|
+
};
|
57
|
+
exports.__esModule = true;
|
58
|
+
exports.ScreenCapture = exports.ScreenCaptureMode = exports.ScreenCaptureDevice = void 0;
|
59
|
+
var Component_js_1 = require("./Component.js");
|
60
|
+
var VideoPlayer_js_1 = require("./VideoPlayer.js");
|
61
|
+
var engine_serialization_js_1 = require("../engine/engine_serialization.js");
|
62
|
+
var AudioSource_js_1 = require("./AudioSource.js");
|
63
|
+
var engine_utils_js_1 = require("../engine/engine_utils.js");
|
64
|
+
var index_js_1 = require("../engine/debug/index.js");
|
65
|
+
var engine_networking_streams_js_1 = require("../engine/engine_networking_streams.js");
|
66
|
+
var debug = engine_utils_js_1.getParam("debugscreensharing");
|
67
|
+
// Which capture source a ScreenCapture component opens. TS-emitted numeric
// enum: the IIFE builds a bidirectional name<->value map on the exported
// object (e.g. ScreenCaptureDevice[0] === "Screen").
var ScreenCaptureDevice;
(function (ScreenCaptureDevice) {
    ScreenCaptureDevice[ScreenCaptureDevice["Screen"] = 0] = "Screen";
    ScreenCaptureDevice[ScreenCaptureDevice["Camera"] = 1] = "Camera";
    /** Please note that canvas streaming might not work reliably on chrome: https://bugs.chromium.org/p/chromium/issues/detail?id=1156408 */
    ScreenCaptureDevice[ScreenCaptureDevice["Canvas"] = 2] = "Canvas";
    /** When using Microphone only the voice will be sent */
    ScreenCaptureDevice[ScreenCaptureDevice["Microphone"] = 3] = "Microphone";
})(ScreenCaptureDevice = exports.ScreenCaptureDevice || (exports.ScreenCaptureDevice = {}));
|
76
|
+
// Current streaming state of a ScreenCapture component: not streaming,
// sending our own stream, or receiving a remote one. TS-emitted numeric enum.
var ScreenCaptureMode;
(function (ScreenCaptureMode) {
    ScreenCaptureMode[ScreenCaptureMode["Idle"] = 0] = "Idle";
    ScreenCaptureMode[ScreenCaptureMode["Sending"] = 1] = "Sending";
    ScreenCaptureMode[ScreenCaptureMode["Receiving"] = 2] = "Receiving";
})(ScreenCaptureMode = exports.ScreenCaptureMode || (exports.ScreenCaptureMode = {}));
|
82
|
+
// Behaviour component that shares a screen / camera / canvas / microphone
// stream with the room (via NetworkedStreams + PeerHandle) and plays back
// streams received from others through a VideoPlayer or AudioSource.
// NOTE: `share`/`tryShareUserCamera` below are tsc-emitted __generator state
// machines; the numeric case labels encode the original async control flow.
var ScreenCapture = /** @class */ (function (_super) {
    __extends(ScreenCapture, _super);
    function ScreenCapture() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // When true, clicking the object toggles sharing / fullscreen playback.
        _this.allowStartOnClick = true;
        // TODO: make this a property
        /** Note: this can not be changed while streaming */
        _this.device = ScreenCaptureDevice.Screen;
        // True between a user-initiated share request and close().
        _this._requestOpen = false;
        _this._currentStream = null;
        _this._currentMode = ScreenCaptureMode.Idle;
        // Remote stream arrived: adopt it in Receiving mode (inactive streams ignored).
        _this.onReceiveStream = function (evt) {
            var _a;
            if (((_a = evt.stream) === null || _a === void 0 ? void 0 : _a.active) !== true)
                return;
            _this.setStream(evt.stream, ScreenCaptureMode.Receiving);
        };
        // Remote call ended: leave fullscreen playback if we were receiving.
        _this.onCallEnded = function (_evt) {
            if (debug)
                console.log("CALL ENDED", _this.isReceiving, _this === null || _this === void 0 ? void 0 : _this.screenspace);
            if (_this.isReceiving)
                _this.screenspace = false;
        };
        return _this;
        // private _cameraSelectionWindow : Window | null = null;
        // private openWindowToSelectCamera(){
        // }
    }
    // Pointer hover feedback when click-to-share is enabled.
    ScreenCapture.prototype.onPointerEnter = function () {
        if (!this.allowStartOnClick)
            return;
        this.context.input.setCursorPointer();
    };
    ScreenCapture.prototype.onPointerExit = function () {
        if (!this.allowStartOnClick)
            return;
        this.context.input.setCursorNormal();
    };
    // Click behavior: toggle fullscreen while receiving, stop while sending,
    // otherwise start sharing. Only reacts to the primary pointer and only
    // while connected to a room.
    ScreenCapture.prototype.onPointerClick = function (evt) {
        var _a;
        if (!this.allowStartOnClick)
            return;
        if (evt && evt.pointerId !== 0)
            return;
        if (this.context.connection.isInRoom === false)
            return;
        if (this.isReceiving && ((_a = this.videoPlayer) === null || _a === void 0 ? void 0 : _a.isPlaying)) {
            if (this.videoPlayer)
                this.videoPlayer.screenspace = !this.videoPlayer.screenspace;
            return;
        }
        if (this.isSending) {
            this.close();
            return;
        }
        this.share();
    };
    // VideoPlayer used for playback; reassigning while streaming moves the
    // current stream over to the new player.
    Object.defineProperty(ScreenCapture.prototype, "videoPlayer", {
        get: function () { return this._videoPlayer; },
        set: function (val) {
            if (this._videoPlayer && (this.isSending || this.isReceiving)) {
                this._videoPlayer.stop();
            }
            this._videoPlayer = val;
            if (this._videoPlayer && this._currentStream && (this.isSending || this.isReceiving)) {
                this._videoPlayer.setVideo(this._currentStream);
            }
        },
        enumerable: false,
        configurable: true
    });
    // Forwards fullscreen ("screenspace") playback state to the video player.
    Object.defineProperty(ScreenCapture.prototype, "screenspace", {
        get: function () { var _a, _b; return (_b = (_a = this.videoPlayer) === null || _a === void 0 ? void 0 : _a.screenspace) !== null && _b !== void 0 ? _b : false; },
        set: function (v) { if (this.videoPlayer)
            this.videoPlayer.screenspace = v; },
        enumerable: false,
        configurable: true
    });
    // NOTE(review): getter name is a typo for "currentStream"; it is public
    // API here, so renaming would break callers — consider adding a correctly
    // spelled alias in the TS source.
    Object.defineProperty(ScreenCapture.prototype, "currentScream", {
        get: function () {
            return this._currentStream;
        },
        enumerable: false,
        configurable: true
    });
    Object.defineProperty(ScreenCapture.prototype, "currentMode", {
        get: function () {
            return this._currentMode;
        },
        enumerable: false,
        configurable: true
    });
    // True while our own stream is active and we are in Sending mode.
    Object.defineProperty(ScreenCapture.prototype, "isSending", {
        get: function () {
            var _a;
            return ((_a = this._currentStream) === null || _a === void 0 ? void 0 : _a.active) && this._currentMode === ScreenCaptureMode.Sending;
        },
        enumerable: false,
        configurable: true
    });
    // True while receiving and at least one track of the remote stream is live.
    Object.defineProperty(ScreenCapture.prototype, "isReceiving", {
        get: function () {
            if (this._currentMode === ScreenCaptureMode.Receiving) {
                if (!this._currentStream || this._currentStream.active === false)
                    return false;
                // if any track is still live consider it active
                var tracks = this._currentStream.getTracks();
                for (var _i = 0, tracks_1 = tracks; _i < tracks_1.length; _i++) {
                    var track = tracks_1[_i];
                    if (track.readyState === "live")
                        return true;
                }
            }
            return false;
        },
        enumerable: false,
        configurable: true
    });
    // All devices except Microphone need a VideoPlayer for playback.
    Object.defineProperty(ScreenCapture.prototype, "requiresVideoPlayer", {
        get: function () { return this.device !== ScreenCaptureDevice.Microphone; },
        enumerable: false,
        configurable: true
    });
    // Sets up the networking backend and defers remote playback until the
    // browser allows audio playback (autoplay policy).
    ScreenCapture.prototype.awake = function () {
        var _this = this;
        if (debug)
            console.log("Screensharing", this.name, this);
        AudioSource_js_1.AudioSource.registerWaitForAllowAudio(function () {
            if (_this.videoPlayer && _this._currentStream && _this._currentMode === ScreenCaptureMode.Receiving) {
                _this.videoPlayer.playInBackground = true;
                _this.videoPlayer.setVideo(_this._currentStream);
            }
        });
        var handle = engine_networking_streams_js_1.PeerHandle.getOrCreate(this.context, this.guid);
        this._net = new engine_networking_streams_js_1.NetworkedStreams(this.context, handle);
    };
    // Subscribe to stream events while the component is enabled.
    ScreenCapture.prototype.onEnable = function () {
        var _a, _b, _c;
        (_a = this._net) === null || _a === void 0 ? void 0 : _a.enable();
        //@ts-ignore
        (_b = this._net) === null || _b === void 0 ? void 0 : _b.addEventListener(engine_networking_streams_js_1.PeerEvent.ReceiveStream, this.onReceiveStream);
        //@ts-ignore
        (_c = this._net) === null || _c === void 0 ? void 0 : _c.addEventListener(engine_networking_streams_js_1.PeerEvent.CallEnded, this.onCallEnded);
    };
    // Unsubscribe and stop any active stream when disabled.
    ScreenCapture.prototype.onDisable = function () {
        var _a, _b, _c;
        //@ts-ignore
        (_a = this._net) === null || _a === void 0 ? void 0 : _a.removeEventListener(engine_networking_streams_js_1.PeerEvent.ReceiveStream, this.onReceiveStream);
        //@ts-ignore
        (_b = this._net) === null || _b === void 0 ? void 0 : _b.removeEventListener(engine_networking_streams_js_1.PeerEvent.CallEnded, this.onCallEnded);
        (_c = this._net) === null || _c === void 0 ? void 0 : _c.disable();
        this.close();
    };
    /** Call to begin screensharing */
    // Async (tsc __awaiter/__generator emit). Picks the capture API by
    // this.device, applies default video constraints, and hands the acquired
    // stream to setStream in Sending mode. State labels: 0=setup, 1=try entry,
    // 2=Camera, 3/4=Screen, 5=Canvas, 6/7=Microphone, 9=catch, 10=done.
    ScreenCapture.prototype.share = function (opts) {
        var _a, _b;
        return __awaiter(this, void 0, void 0, function () {
            var settings, displayMediaOptions, videoOptions, _c, myVideo, fps, stream, myStream, err_1;
            return __generator(this, function (_d) {
                switch (_d.label) {
                    case 0:
                        if (opts === null || opts === void 0 ? void 0 : opts.device)
                            this.device = opts.device;
                        if (!this.videoPlayer && this.requiresVideoPlayer) {
                            if (!this._videoPlayer) {
                                this._videoPlayer = (_a = Component_js_1.GameObject.getComponent(this.gameObject, VideoPlayer_js_1.VideoPlayer)) !== null && _a !== void 0 ? _a : undefined;
                            }
                            if (!this.videoPlayer) {
                                console.warn("Can not share video without a videoPlayer assigned");
                                return [2 /*return*/];
                            }
                        }
                        this._requestOpen = true;
                        _d.label = 1;
                    case 1:
                        _d.trys.push([1, 9, , 10]);
                        settings = (_b = opts === null || opts === void 0 ? void 0 : opts.constraints) !== null && _b !== void 0 ? _b : {
                            echoCancellation: true,
                            autoGainControl: false
                        };
                        displayMediaOptions = {
                            video: settings,
                            audio: settings
                        };
                        videoOptions = displayMediaOptions.video;
                        if (videoOptions !== undefined && typeof videoOptions !== "boolean") {
                            // Set default video settings
                            if (!videoOptions.width)
                                videoOptions.width = { max: 1920 };
                            if (!videoOptions.height)
                                videoOptions.height = { max: 1920 };
                            if (!videoOptions.aspectRatio)
                                videoOptions.aspectRatio = { ideal: 1.7777777778 };
                            if (!videoOptions.frameRate)
                                videoOptions.frameRate = { ideal: 24 };
                            if (!videoOptions.facingMode)
                                videoOptions.facingMode = { ideal: "user" };
                        }
                        _c = this.device;
                        switch (_c) {
                            case ScreenCaptureDevice.Camera: return [3 /*break*/, 2];
                            case ScreenCaptureDevice.Screen: return [3 /*break*/, 3];
                            case ScreenCaptureDevice.Canvas: return [3 /*break*/, 5];
                            case ScreenCaptureDevice.Microphone: return [3 /*break*/, 6];
                        }
                        return [3 /*break*/, 8];
                    case 2:
                        // NOTE(review): camera capture is intentionally not awaited here.
                        this.tryShareUserCamera(displayMediaOptions, opts);
                        return [3 /*break*/, 8];
                    case 3:
                        if (!navigator.mediaDevices.getDisplayMedia) {
                            console.error("No getDisplayMedia support");
                            return [2 /*return*/];
                        }
                        return [4 /*yield*/, navigator.mediaDevices.getDisplayMedia(displayMediaOptions)];
                    case 4:
                        myVideo = _d.sent();
                        // If close() was called while the picker was open, discard the stream.
                        if (this._requestOpen) {
                            this.setStream(myVideo, ScreenCaptureMode.Sending);
                        }
                        else
                            engine_networking_streams_js_1.disposeStream(myVideo);
                        return [3 /*break*/, 8];
                    case 5:
                        // captureStream(0) — presumably frames are pushed on demand; verify.
                        fps = 0;
                        stream = this.context.renderer.domElement.captureStream(fps);
                        this.setStream(stream, ScreenCaptureMode.Sending);
                        return [3 /*break*/, 8];
                    case 6:
                        // NOTE(review): this branch checks getDisplayMedia support but then
                        // calls getUserMedia below — the capability check looks copy-pasted
                        // from the Screen branch; confirm intended API.
                        if (!navigator.mediaDevices.getDisplayMedia) {
                            console.error("No getDisplayMedia support");
                            return [2 /*return*/];
                        }
                        displayMediaOptions.video = false;
                        return [4 /*yield*/, navigator.mediaDevices.getUserMedia(displayMediaOptions)];
                    case 7:
                        myStream = _d.sent();
                        if (this._requestOpen) {
                            this.setStream(myStream, ScreenCaptureMode.Sending);
                        }
                        else
                            engine_networking_streams_js_1.disposeStream(myStream);
                        return [3 /*break*/, 8];
                    case 8: return [3 /*break*/, 10];
                    case 9:
                        err_1 = _d.sent();
                        if (err_1.name === "NotAllowedError") {
                            // user cancelled stream selection
                            console.log("Selection cancelled");
                            this._requestOpen = false;
                            return [2 /*return*/];
                        }
                        console.error("Error opening video", err_1);
                        return [3 /*break*/, 10];
                    case 10: return [2 /*return*/];
                }
            });
        });
    };
    // Stops sending/receiving: unregisters the stream from the network layer,
    // stops all tracks and resets to Idle.
    ScreenCapture.prototype.close = function () {
        var _a;
        this._requestOpen = false;
        if (this._currentStream) {
            if (debug)
                console.warn("Close current stream / disposing resources, stream was active?", this._currentStream.active);
            (_a = this._net) === null || _a === void 0 ? void 0 : _a.stopSendingStream(this._currentStream);
            engine_networking_streams_js_1.disposeStream(this._currentStream);
            this._currentMode = ScreenCaptureMode.Idle;
            this._currentStream = null;
        }
    };
    // Adopts `stream` as the current stream in the given mode: routes it to
    // the VideoPlayer (video devices) or an AudioSource (microphone), starts
    // network sending when mode === Sending, and closes on track end.
    ScreenCapture.prototype.setStream = function (stream, mode) {
        var _this = this;
        var _a, _b, _c;
        if (stream === this._currentStream)
            return;
        this.close();
        if (!stream)
            return;
        this._currentStream = stream;
        this._requestOpen = true;
        this._currentMode = mode;
        var isVideoStream = this.device !== ScreenCaptureDevice.Microphone;
        var isSending = mode === ScreenCaptureMode.Sending;
        if (isVideoStream) {
            if (this._videoPlayer)
                this._videoPlayer.setVideo(stream);
            else
                console.error("No video player assigned for video stream");
        }
        else {
            // Lazily create a non-spatial AudioSource for microphone playback.
            if (!this._audioSource) {
                this._audioSource = new AudioSource_js_1.AudioSource();
                this._audioSource.spatialBlend = 0;
                this._audioSource.volume = 1;
                this.gameObject.addComponent(this._audioSource);
            }
            if (!isSending) {
                console.log("PLAY", stream.getAudioTracks());
                this._audioSource.volume = 1;
                (_a = this._audioSource) === null || _a === void 0 ? void 0 : _a.play(stream);
            }
        }
        if (isSending) {
            (_b = this._net) === null || _b === void 0 ? void 0 : _b.startSendingStream(stream);
        }
        // Mute audio for the video we are sending
        if (isSending) {
            if (this._videoPlayer)
                this._videoPlayer.muted = true;
            (_c = this._audioSource) === null || _c === void 0 ? void 0 : _c.stop();
        }
        // Close this component when any track ends (e.g. user hits browser "stop sharing").
        var _loop_1 = function (track) {
            track.addEventListener("ended", function () {
                if (debug)
                    console.log("Track ended", track);
                _this.close();
            });
            if (debug) {
                if (track.kind === "video") {
                    if (isSending)
                        console.log("Video →", track.getSettings());
                    else
                        console.log("Video ←", track.getSettings());
                }
            }
        };
        for (var _i = 0, _d = stream.getTracks(); _i < _d.length; _i++) {
            var track = _d[_i];
            _loop_1(track);
        }
    };
    // Async (tsc emit): enumerates video input devices and tries each one
    // (honoring options.deviceId / options.deviceFilter) until getUserMedia
    // succeeds; the acquired stream is adopted in Sending mode. State labels:
    // 1=devices listed, 2=loop head, 3=try entry, 4=stream acquired, 5=catch,
    // 6=next device, 7=done.
    ScreenCapture.prototype.tryShareUserCamera = function (opts, options) {
        var _a;
        return __awaiter(this, void 0, void 0, function () {
            var devices, _i, devices_1, dev, id, useDevice, userMedia, err_2;
            return __generator(this, function (_b) {
                switch (_b.label) {
                    case 0: return [4 /*yield*/, navigator.mediaDevices.enumerateDevices()];
                    case 1:
                        devices = (_b.sent()).filter(function (d) { return d.kind === "videoinput"; });
                        if (debug)
                            console.log("Request camera", devices);
                        _i = 0, devices_1 = devices;
                        _b.label = 2;
                    case 2:
                        if (!(_i < devices_1.length)) return [3 /*break*/, 7];
                        dev = devices_1[_i];
                        _b.label = 3;
                    case 3:
                        _b.trys.push([3, 5, , 6]);
                        // Abort the whole scan if close() was called meanwhile.
                        if (!this._requestOpen)
                            return [3 /*break*/, 7];
                        if (dev.kind !== "videoinput")
                            return [3 /*break*/, 6];
                        id = dev.deviceId;
                        if ((options === null || options === void 0 ? void 0 : options.deviceId) !== undefined) {
                            if (id !== options.deviceId)
                                return [3 /*break*/, 6];
                        }
                        useDevice = (_a = options === null || options === void 0 ? void 0 : options.deviceFilter) === null || _a === void 0 ? void 0 : _a.call(this, dev);
                        if (useDevice === false)
                            return [3 /*break*/, 6];
                        if (opts.video !== false) {
                            if (typeof opts.video === "undefined" || typeof opts.video === "boolean") {
                                opts.video = {};
                            }
                            opts.video.deviceId = id;
                        }
                        return [4 /*yield*/, navigator.mediaDevices.getUserMedia(opts)];
                    case 4:
                        userMedia = _b.sent();
                        if (this._requestOpen) {
                            this.setStream(userMedia, ScreenCaptureMode.Sending);
                        }
                        else
                            engine_networking_streams_js_1.disposeStream(userMedia);
                        if (debug)
                            console.log("Selected camera", dev);
                        return [3 /*break*/, 7];
                    case 5:
                        err_2 = _b.sent();
                        // First message is firefox, second is chrome when the video source is already in use by another app
                        if (err_2.message === "Failed to allocate videosource" || err_2.message === "Could not start video source") {
                            index_js_1.showBalloonWarning("Failed to start video: Try another camera (Code " + err_2.code + ")");
                            console.warn(err_2);
                            return [3 /*break*/, 6];
                        }
                        else {
                            console.error("Failed to get user media", err_2.message, err_2.code, err_2);
                        }
                        return [3 /*break*/, 6];
                    case 6:
                        _i++;
                        return [3 /*break*/, 2];
                    case 7: return [2 /*return*/];
                }
            });
        });
    };
    __decorate([
        engine_serialization_js_1.serializable(VideoPlayer_js_1.VideoPlayer)
    ], ScreenCapture.prototype, "videoPlayer");
    __decorate([
        engine_serialization_js_1.serializable()
    ], ScreenCapture.prototype, "device");
    return ScreenCapture;
}(Component_js_1.Behaviour));
|
490
|
+
exports.ScreenCapture = ScreenCapture;
|
@@ -0,0 +1,888 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __extends = (this && this.__extends) || (function () {
|
3
|
+
var extendStatics = function (d, b) {
|
4
|
+
extendStatics = Object.setPrototypeOf ||
|
5
|
+
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
6
|
+
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
7
|
+
return extendStatics(d, b);
|
8
|
+
};
|
9
|
+
return function (d, b) {
|
10
|
+
extendStatics(d, b);
|
11
|
+
function __() { this.constructor = d; }
|
12
|
+
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
13
|
+
};
|
14
|
+
})();
|
15
|
+
// tsc emit helper: applies an array of decorators to a class member or class.
// Uses Reflect.decorate when available, otherwise folds the decorators
// right-to-left over the target/descriptor.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    // c < 3: class decorator; c >= 3: member decorator (with optional descriptor).
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
21
|
+
// tsc emit helper: drives downlevel generator/coroutine bodies. The compiled
// body is a switch over `_.label`; opcodes in `op[0]` encode next/throw/
// return/yield/break/try bookkeeping. Do not edit — generated code.
var __generator = (this && this.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (_) try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
|
48
|
+
exports.__esModule = true;
exports.VideoPlayer = exports.VideoRenderMode = exports.VideoAudioOutputMode = exports.VideoSource = exports.AspectMode = void 0;
var Component_js_1 = require("./Component.js");
var engine_serialization_decorator_js_1 = require("../engine/engine_serialization_decorator.js");
var three_1 = require("three");
var engine_input_utils_js_1 = require("../engine/engine_input_utils.js");
var engine_utils_js_1 = require("../engine/engine_utils.js");
var Renderer_js_1 = require("./Renderer.js");
var engine_three_utils_js_1 = require("../engine/engine_three_utils.js");
var engine_create_objects_js_1 = require("../engine/engine_create_objects.js");
var index_js_1 = require("../engine/debug/index.js");
// Extra logging is enabled via the "debugvideo" URL/query parameter.
var debug = engine_utils_js_1.getParam("debugvideo");
// How the target object is rescaled to match the video's aspect ratio.
var AspectMode;
(function (AspectMode) {
    AspectMode[AspectMode["None"] = 0] = "None";
    AspectMode[AspectMode["AdjustHeight"] = 1] = "AdjustHeight";
    AspectMode[AspectMode["AdjustWidth"] = 2] = "AdjustWidth";
})(AspectMode = exports.AspectMode || (exports.AspectMode = {}));
// Which field of the player provides the video content.
var VideoSource;
(function (VideoSource) {
    /// <summary>
    /// <para>Use the current clip as the video content source.</para>
    /// </summary>
    VideoSource[VideoSource["VideoClip"] = 0] = "VideoClip";
    /// <summary>
    /// <para>Use the current URL as the video content source.</para>
    /// </summary>
    VideoSource[VideoSource["Url"] = 1] = "Url";
})(VideoSource = exports.VideoSource || (exports.VideoSource = {}));
// Where the video's audio is routed.
var VideoAudioOutputMode;
(function (VideoAudioOutputMode) {
    VideoAudioOutputMode[VideoAudioOutputMode["None"] = 0] = "None";
    VideoAudioOutputMode[VideoAudioOutputMode["AudioSource"] = 1] = "AudioSource";
    VideoAudioOutputMode[VideoAudioOutputMode["Direct"] = 2] = "Direct";
    VideoAudioOutputMode[VideoAudioOutputMode["APIOnly"] = 3] = "APIOnly";
})(VideoAudioOutputMode = exports.VideoAudioOutputMode || (exports.VideoAudioOutputMode = {}));
// How the video is presented (only MaterialOverride is fully implemented below;
// RenderTexture logs an error in handleBeginPlaying).
var VideoRenderMode;
(function (VideoRenderMode) {
    VideoRenderMode[VideoRenderMode["CameraFarPlane"] = 0] = "CameraFarPlane";
    VideoRenderMode[VideoRenderMode["CameraNearPlane"] = 1] = "CameraNearPlane";
    VideoRenderMode[VideoRenderMode["RenderTexture"] = 2] = "RenderTexture";
    VideoRenderMode[VideoRenderMode["MaterialOverride"] = 3] = "MaterialOverride";
})(VideoRenderMode = exports.VideoRenderMode || (exports.VideoRenderMode = {}));
|
91
|
+
// Component that plays a video clip or URL onto a material/texture in the
// scene, with optional screenspace (fullscreen quad) presentation.
var VideoPlayer = /** @class */ (function (_super) {
    __extends(VideoPlayer, _super);
    function VideoPlayer() {
        var _this = _super.call(this) || this;
        _this.playOnAwake = true;
        _this.aspectMode = AspectMode.None;
        _this.clip = null;
        // Playhead position (seconds) used to resume after pause/stop.
        _this.time = 0;
        // Cached values mirrored onto the <video> element once it exists.
        _this._playbackSpeed = 1;
        _this._isLooping = false;
        _this._muted = false;
        _this._audioOutputMode = VideoAudioOutputMode.Direct;
        /** Set this to false to pause video playback while the tab is not active */
        _this.playInBackground = true;
        _this._crossOrigin = "anonymous";
        // set a default src, this should not be undefined
        _this.source = VideoSource.Url;
        _this.url = null;
        _this._videoElement = null;
        _this._videoTexture = null;
        _this._videoMaterial = null;
        _this._isPlaying = false;
        _this.wasPlaying = false;
        // Pauses/resumes playback when the tab is hidden/shown (unless
        // playInBackground is set). Registered in onEnable/onDisable.
        _this.visibilityChanged = function (_) {
            switch (document.visibilityState) {
                case "hidden":
                    if (!_this.playInBackground) {
                        _this.wasPlaying = _this._isPlaying;
                        _this.pause();
                    }
                    break;
                case "visible":
                    if (_this.wasPlaying && !_this._isPlaying)
                        _this.play();
                    break;
            }
        };
        _this._receivedInput = false;
        _this._overlay = null;
        _this._updateAspectRoutineId = -1;
        // Browsers block unmuted autoplay until the user interacts; once input
        // arrives, re-apply element settings so audio can be unmuted.
        engine_input_utils_js_1.awaitInput(function () {
            _this._receivedInput = true;
            _this.updateVideoElementSettings();
        });
        _this._targetObjects = [];
        // Debug convenience: "videoscreenspace" query param toggles fullscreen
        // mode with the "f" key.
        if (engine_utils_js_1.getParam("videoscreenspace")) {
            window.addEventListener("keydown", function (evt) {
                if (evt.key === "f") {
                    _this.screenspace = !_this.screenspace;
                }
            });
        }
        return _this;
    }
|
145
|
+
    // playbackSpeed: reads from the live element when present, else the cached value.
    Object.defineProperty(VideoPlayer.prototype, "playbackSpeed", {
        get: function () {
            var _a, _b;
            return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.playbackRate) !== null && _b !== void 0 ? _b : this._playbackSpeed;
        },
        set: function (val) {
            this._playbackSpeed = val;
            if (this._videoElement)
                this._videoElement.playbackRate = val;
        },
        enumerable: false,
        configurable: true
    });
    // isLooping: mirrors the element's loop flag when present.
    Object.defineProperty(VideoPlayer.prototype, "isLooping", {
        get: function () {
            var _a, _b;
            return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.loop) !== null && _b !== void 0 ? _b : this._isLooping;
        },
        set: function (val) {
            this._isLooping = val;
            if (this._videoElement)
                this._videoElement.loop = val;
        },
        enumerable: false,
        configurable: true
    });
    // currentTime: element playhead in seconds; falls back to cached `time`.
    Object.defineProperty(VideoPlayer.prototype, "currentTime", {
        get: function () {
            var _a, _b;
            return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.currentTime) !== null && _b !== void 0 ? _b : this.time;
        },
        set: function (val) {
            if (this._videoElement) {
                this._videoElement.currentTime = val;
            }
            else
                this.time = val;
        },
        enumerable: false,
        configurable: true
    });
    // isPlaying: true when the element is actively progressing, or when an
    // attached MediaStream is still active.
    Object.defineProperty(VideoPlayer.prototype, "isPlaying", {
        get: function () {
            var video = this._videoElement;
            if (video) {
                if (video.currentTime > 0 && !video.paused && !video.ended
                    && video.readyState > video.HAVE_CURRENT_DATA)
                    return true;
                else if (video.srcObject) {
                    var stream = video.srcObject;
                    if (stream.active)
                        return true;
                }
            }
            return false;
        },
        enumerable: false,
        configurable: true
    });
    // crossOrigin: mirrored onto the element's crossorigin attribute.
    Object.defineProperty(VideoPlayer.prototype, "crossOrigin", {
        get: function () {
            var _a, _b;
            return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.crossOrigin) !== null && _b !== void 0 ? _b : this._crossOrigin;
        },
        set: function (val) {
            this._crossOrigin = val;
            if (this._videoElement) {
                if (val !== null)
                    this._videoElement.setAttribute("crossorigin", val);
                else
                    this._videoElement.removeAttribute("crossorigin");
            }
        },
        enumerable: false,
        configurable: true
    });
    // Read-only access to the cloned material that receives the video texture.
    Object.defineProperty(VideoPlayer.prototype, "videoMaterial", {
        get: function () {
            return this._videoMaterial;
        },
        enumerable: false,
        configurable: true
    });
    // Read-only access to the three.js VideoTexture (null until create()).
    Object.defineProperty(VideoPlayer.prototype, "videoTexture", {
        get: function () {
            return this._videoTexture;
        },
        enumerable: false,
        configurable: true
    });
    // Read-only access to the backing <video> element (null until create()).
    Object.defineProperty(VideoPlayer.prototype, "videoElement", {
        get: function () {
            return this._videoElement;
        },
        enumerable: false,
        configurable: true
    });
    // muted: mirrors the element's muted flag when present.
    Object.defineProperty(VideoPlayer.prototype, "muted", {
        get: function () {
            var _a, _b;
            return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.muted) !== null && _b !== void 0 ? _b : this._muted;
        },
        set: function (val) {
            this._muted = val;
            if (this._videoElement)
                this._videoElement.muted = val;
        },
        enumerable: false,
        configurable: true
    });
    // audioOutputMode: AudioSource routing is not implemented yet; warn in dev.
    Object.defineProperty(VideoPlayer.prototype, "audioOutputMode", {
        get: function () { return this._audioOutputMode; },
        set: function (mode) {
            if (mode !== this._audioOutputMode) {
                if (mode === VideoAudioOutputMode.AudioSource && index_js_1.isDevEnvironment())
                    console.warn("VideoAudioOutputMode.AudioSource is not yet implemented");
                this._audioOutputMode = mode;
                this.updateVideoElementSettings();
            }
        },
        enumerable: false,
        configurable: true
    });
|
268
|
+
VideoPlayer.prototype.setVideo = function (video) {
|
269
|
+
this.clip = video;
|
270
|
+
this.source = VideoSource.VideoClip;
|
271
|
+
if (!this._videoElement)
|
272
|
+
this.create(true);
|
273
|
+
else {
|
274
|
+
// TODO: how to prevent interruption error when another video is already playing
|
275
|
+
this._videoElement.srcObject = video;
|
276
|
+
if (this._isPlaying)
|
277
|
+
this.play();
|
278
|
+
this.updateAspect();
|
279
|
+
}
|
280
|
+
};
|
281
|
+
VideoPlayer.prototype.setClipURL = function (url) {
|
282
|
+
if (this.url === url)
|
283
|
+
return;
|
284
|
+
// console.log("SET URL", url);
|
285
|
+
this.url = url;
|
286
|
+
this.source = VideoSource.Url;
|
287
|
+
if (debug)
|
288
|
+
console.log("set url", url);
|
289
|
+
if (!this._videoElement)
|
290
|
+
this.create(true);
|
291
|
+
else {
|
292
|
+
this._videoElement.src = url;
|
293
|
+
if (this._isPlaying) {
|
294
|
+
this.stop();
|
295
|
+
this.play();
|
296
|
+
}
|
297
|
+
}
|
298
|
+
};
|
299
|
+
VideoPlayer.prototype.onEnable = function () {
|
300
|
+
var _a, _b;
|
301
|
+
if (debug)
|
302
|
+
console.log("VideoPlayer.onEnable", this);
|
303
|
+
window.addEventListener('visibilitychange', this.visibilityChanged);
|
304
|
+
if (this.playOnAwake === true) {
|
305
|
+
this.create(true);
|
306
|
+
}
|
307
|
+
if (this.screenspace) {
|
308
|
+
(_a = this._overlay) === null || _a === void 0 ? void 0 : _a.start();
|
309
|
+
}
|
310
|
+
else
|
311
|
+
(_b = this._overlay) === null || _b === void 0 ? void 0 : _b.stop();
|
312
|
+
};
|
313
|
+
VideoPlayer.prototype.onDisable = function () {
|
314
|
+
var _a;
|
315
|
+
window.removeEventListener('visibilitychange', this.visibilityChanged);
|
316
|
+
(_a = this._overlay) === null || _a === void 0 ? void 0 : _a.stop();
|
317
|
+
this.pause();
|
318
|
+
};
|
319
|
+
VideoPlayer.prototype.onDestroy = function () {
|
320
|
+
var _a;
|
321
|
+
if (this._videoElement) {
|
322
|
+
(_a = this._videoElement.parentElement) === null || _a === void 0 ? void 0 : _a.removeChild(this._videoElement);
|
323
|
+
this._videoElement = null;
|
324
|
+
}
|
325
|
+
if (this._videoTexture) {
|
326
|
+
this._videoTexture.dispose();
|
327
|
+
this._videoTexture = null;
|
328
|
+
}
|
329
|
+
};
|
330
|
+
    // Start (or resume) playback. Creates the element lazily, mutes when no
    // user input has been received yet (autoplay policy), and retries once per
    // second if the browser rejects the play() promise.
    VideoPlayer.prototype.play = function () {
        var _this = this;
        var _a, _b;
        if (!this._videoElement)
            this.create(false);
        if (!this._videoElement)
            return;
        // Already actively playing: nothing to do.
        if (this._isPlaying && !((_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.ended) && !((_b = this._videoElement) === null || _b === void 0 ? void 0 : _b.paused))
            return;
        this._isPlaying = true;
        // Unmuted autoplay is blocked until the user interacts with the page.
        if (!this._receivedInput)
            this._videoElement.muted = true;
        this.updateVideoElementSettings();
        // Resume from the remembered playhead position.
        this._videoElement.currentTime = this.time;
        this._videoElement.play()["catch"](function (err) {
            var _a;
            console.log(err);
            // https://developer.chrome.com/blog/play-request-was-interrupted/
            if (debug)
                console.error("Error playing video", err, "CODE=" + err.code, (_a = _this.videoElement) === null || _a === void 0 ? void 0 : _a.src, _this);
            // Retry while the component is still alive, enabled and expected to play.
            setTimeout(function () {
                if (_this._isPlaying && !_this.destroyed && _this.activeAndEnabled)
                    _this.play();
            }, 1000);
        });
        if (debug)
            console.log("play", this._videoElement, this.time);
    };
|
358
|
+
VideoPlayer.prototype.stop = function () {
|
359
|
+
this._isPlaying = false;
|
360
|
+
this.time = 0;
|
361
|
+
if (!this._videoElement)
|
362
|
+
return;
|
363
|
+
this._videoElement.currentTime = 0;
|
364
|
+
this._videoElement.pause();
|
365
|
+
if (debug)
|
366
|
+
console.log("STOP", this);
|
367
|
+
};
|
368
|
+
VideoPlayer.prototype.pause = function () {
|
369
|
+
var _a, _b, _c;
|
370
|
+
this.time = (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.currentTime) !== null && _b !== void 0 ? _b : 0;
|
371
|
+
this._isPlaying = false;
|
372
|
+
(_c = this._videoElement) === null || _c === void 0 ? void 0 : _c.pause();
|
373
|
+
if (debug)
|
374
|
+
console.log("PAUSE", this, this.currentTime);
|
375
|
+
};
|
376
|
+
    // Resolve the active source (clip or URL), lazily build the hidden <video>
    // element and its VideoTexture, then hand off to handleBeginPlaying.
    VideoPlayer.prototype.create = function (playAutomatically) {
        var _a;
        var src;
        switch (this.source) {
            case VideoSource.VideoClip:
                src = this.clip;
                break;
            case VideoSource.Url:
                src = this.url;
                // Fall back to clip when it holds a URL string and no url is set.
                if (!(src === null || src === void 0 ? void 0 : src.length) && typeof this.clip === "string")
                    src = this.clip;
                break;
        }
        if (!src) {
            if (debug)
                console.warn("No video source set", this);
            return;
        }
        if (!this._videoElement) {
            if (debug)
                console.warn("Create VideoElement", this);
            this._videoElement = this.createVideoElement();
            (_a = this.context.domElement) === null || _a === void 0 ? void 0 : _a.prepend(this._videoElement);
            // hide it because otherwise it would overlay the website with default css
            this.updateVideoElementStyles();
        }
        // Strings become `src` (URL playback); objects become `srcObject` (streams).
        if (typeof src === "string") {
            if (debug)
                console.log("Set Video src", src);
            this._videoElement.src = src;
            // Nor sure why we did this here, but with this code the video does not restart when being paused / enable toggled
            // const str = this._videoElement["captureStream"]?.call(this._videoElement);
            // this.clip = str;
        }
        else {
            if (debug)
                console.log("Set Video srcObject", src);
            this._videoElement.srcObject = src;
        }
        if (!this._videoTexture)
            this._videoTexture = new three_1.VideoTexture(this._videoElement);
        this._videoTexture.flipY = false;
        this._videoTexture.colorSpace = three_1.SRGBColorSpace;
        this.handleBeginPlaying(playAutomatically);
        if (debug)
            console.log(this, playAutomatically);
    };
|
423
|
+
VideoPlayer.prototype.updateAspect = function () {
|
424
|
+
if (this.aspectMode === AspectMode.None)
|
425
|
+
return;
|
426
|
+
this.startCoroutine(this.updateAspectImpl());
|
427
|
+
};
|
428
|
+
    // screenspace: toggles fullscreen-quad presentation via a lazily created
    // VideoOverlay. Enabling requires a video texture to already exist.
    Object.defineProperty(VideoPlayer.prototype, "screenspace", {
        get: function () {
            var _a, _b;
            return (_b = (_a = this._overlay) === null || _a === void 0 ? void 0 : _a.enabled) !== null && _b !== void 0 ? _b : false;
        },
        set: function (val) {
            var _a;
            if (val) {
                if (!this._videoTexture)
                    return;
                if (!this._overlay)
                    this._overlay = new VideoOverlay(this.context);
                this._overlay.add(this._videoTexture);
            }
            else
                (_a = this._overlay) === null || _a === void 0 ? void 0 : _a.remove(this._videoTexture);
            if (this._overlay)
                this._overlay.enabled = val;
        },
        enumerable: false,
        configurable: true
    });
|
450
|
+
VideoPlayer.prototype.createVideoElement = function () {
|
451
|
+
var video = document.createElement("video");
|
452
|
+
if (this._crossOrigin)
|
453
|
+
video.setAttribute("crossorigin", this._crossOrigin);
|
454
|
+
if (debug)
|
455
|
+
console.log("created video element", video);
|
456
|
+
return video;
|
457
|
+
};
|
458
|
+
    // Resolve the render target for the chosen renderMode, clone its material
    // once and assign the video texture to it, then apply element settings and
    // optionally start playback.
    VideoPlayer.prototype.handleBeginPlaying = function (playAutomatically) {
        var _a, _b;
        if (!this.enabled)
            return;
        if (!this._videoElement)
            return;
        this._targetObjects.length = 0;
        var target = this.gameObject;
        switch (this.renderMode) {
            case VideoRenderMode.MaterialOverride:
                // Prefer the explicitly assigned renderer, else find one on this object.
                target = (_a = this.targetMaterialRenderer) === null || _a === void 0 ? void 0 : _a.gameObject;
                if (!target)
                    target = (_b = Component_js_1.GameObject.getComponent(this.gameObject, Renderer_js_1.Renderer)) === null || _b === void 0 ? void 0 : _b.gameObject;
                break;
            case VideoRenderMode.RenderTexture:
                console.error("VideoPlayer renderTexture not implemented yet. Please use material override instead");
                return;
        }
        if (!target) {
            console.error("Missing target for video material renderer", this.name, VideoRenderMode[this.renderMode], this);
            return;
        }
        var mat = target["material"];
        if (mat) {
            this._targetObjects.push(target);
            // Clone once so the shared material asset is not mutated.
            if (mat !== this._videoMaterial) {
                this._videoMaterial = mat.clone();
                target["material"] = this._videoMaterial;
            }
            var fieldName = "map";
            var videoMaterial = this._videoMaterial;
            if (!this.targetMaterialProperty) {
                videoMaterial[fieldName] = this._videoTexture;
            }
            else {
                // Currently every targetMaterialProperty value falls through to "map".
                switch (this.targetMaterialProperty) {
                    default:
                        videoMaterial[fieldName] = this._videoTexture;
                        break;
                    // doesnt render:
                    // case "emissiveTexture":
                    //     console.log(this.videoMaterial);
                    //     // (this.videoMaterial as any).map = this.videoTexture;
                    //     (this.videoMaterial as any).emissive?.set(1,1,1);// = this.videoTexture;
                    //     (this.videoMaterial as any).emissiveMap = this.videoTexture;
                    //     break;
                }
            }
        }
        else {
            console.warn("Can not play video, no material found, this might be a multimaterial case which is not supported yet");
            return;
        }
        this.updateVideoElementSettings();
        this.updateVideoElementStyles();
        if (playAutomatically)
            this.play();
    };
|
516
|
+
VideoPlayer.prototype.updateVideoElementSettings = function () {
|
517
|
+
if (!this._videoElement)
|
518
|
+
return;
|
519
|
+
this._videoElement.loop = this._isLooping;
|
520
|
+
this._videoElement.currentTime = this.currentTime;
|
521
|
+
this._videoElement.playbackRate = this._playbackSpeed;
|
522
|
+
// dont open in fullscreen on ios
|
523
|
+
this._videoElement.playsInline = true;
|
524
|
+
var muted = !this._receivedInput || this.audioOutputMode === VideoAudioOutputMode.None;
|
525
|
+
if (!muted && this._muted)
|
526
|
+
muted = true;
|
527
|
+
this._videoElement.muted = muted;
|
528
|
+
if (this.playOnAwake)
|
529
|
+
this._videoElement.autoplay = true;
|
530
|
+
};
|
531
|
+
VideoPlayer.prototype.updateVideoElementStyles = function () {
|
532
|
+
if (!this._videoElement)
|
533
|
+
return;
|
534
|
+
// set style here so preview frame is rendered
|
535
|
+
// set display and selectable because otherwise is interfers with input/focus e.g. breaks orbit control
|
536
|
+
this._videoElement.style.userSelect = "none";
|
537
|
+
this._videoElement.style.visibility = "hidden";
|
538
|
+
this._videoElement.style.display = "none";
|
539
|
+
this.updateAspect();
|
540
|
+
};
|
541
|
+
    // Coroutine (downlevel-compiled generator): polls the active stream's video
    // track settings for a width/height, then rescales the target objects per
    // aspectMode. Keeps looping while this instance is the latest routine
    // (_updateAspectRoutineId guard) and the same clip is still playing.
    // Label map: 1 = loop head, 2-5 = wait-10-frames when no aspect yet,
    // 6-7 = wait while aspect unchanged, 8-12 = apply scale + wait 3 frames.
    VideoPlayer.prototype.updateAspectImpl = function () {
        var id, lastAspect, stream, aspect, _i, _a, track, settings, i, _b, _c, obj, worldAspect, parentScale, i;
        return __generator(this, function (_d) {
            switch (_d.label) {
                case 0:
                    // Invalidate any previously started aspect routine.
                    id = ++this._updateAspectRoutineId;
                    lastAspect = undefined;
                    stream = this.clip;
                    _d.label = 1;
                case 1:
                    if (!(id === this._updateAspectRoutineId && this.aspectMode !== AspectMode.None && this.clip && stream === this.clip && this._isPlaying)) return [3 /*break*/, 13];
                    if (!stream || typeof stream === "string") {
                        return [2 /*return*/];
                    }
                    aspect = undefined;
                    for (_i = 0, _a = stream.getVideoTracks(); _i < _a.length; _i++) {
                        track = _a[_i];
                        settings = track.getSettings();
                        if (settings && settings.width && settings.height) {
                            aspect = settings.width / settings.height;
                            break;
                        }
                        // on firefox capture canvas stream works but looks like
                        // the canvas stream track doesnt contain settings?!!?
                        else {
                            aspect = this.context.renderer.domElement.clientWidth / this.context.renderer.domElement.clientHeight;
                        }
                    }
                    if (!(aspect === undefined)) return [3 /*break*/, 6];
                    i = 0;
                    _d.label = 2;
                case 2:
                    if (!(i < 10)) return [3 /*break*/, 5];
                    return [4 /*yield*/];
                case 3:
                    _d.sent();
                    _d.label = 4;
                case 4:
                    i++;
                    return [3 /*break*/, 2];
                case 5:
                    if (!this.isPlaying)
                        return [3 /*break*/, 13];
                    return [3 /*break*/, 1];
                case 6:
                    if (!(lastAspect === aspect)) return [3 /*break*/, 8];
                    return [4 /*yield*/];
                case 7:
                    _d.sent();
                    return [3 /*break*/, 1];
                case 8:
                    // Apply the new aspect, compensating for the parent's world scale.
                    for (_b = 0, _c = this._targetObjects; _b < _c.length; _b++) {
                        obj = _c[_b];
                        worldAspect = 1;
                        if (obj.parent) {
                            parentScale = engine_three_utils_js_1.getWorldScale(obj.parent);
                            worldAspect = parentScale.x / parentScale.y;
                        }
                        switch (this.aspectMode) {
                            case AspectMode.AdjustHeight:
                                obj.scale.y = 1 / aspect * obj.scale.x * worldAspect;
                                break;
                            case AspectMode.AdjustWidth:
                                obj.scale.x = aspect * obj.scale.y * worldAspect;
                                break;
                        }
                    }
                    i = 0;
                    _d.label = 9;
                case 9:
                    if (!(i < 3)) return [3 /*break*/, 12];
                    return [4 /*yield*/];
                case 10:
                    _d.sent();
                    _d.label = 11;
                case 11:
                    i++;
                    return [3 /*break*/, 9];
                case 12: return [3 /*break*/, 1];
                case 13: return [2 /*return*/];
            }
        });
    };
|
624
|
+
    // Serialization metadata: marks these fields/accessors as serializable so
    // the engine's loader can read/write them.
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "playOnAwake");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "aspectMode");
    __decorate([
        engine_serialization_decorator_js_1.serializable(URL)
    ], VideoPlayer.prototype, "clip");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "renderMode");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "targetMaterialProperty");
    __decorate([
        engine_serialization_decorator_js_1.serializable(Renderer_js_1.Renderer)
    ], VideoPlayer.prototype, "targetMaterialRenderer");
    __decorate([
        engine_serialization_decorator_js_1.serializable(three_1.Texture)
    ], VideoPlayer.prototype, "targetTexture");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "time");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "playbackSpeed");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "isLooping");
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], VideoPlayer.prototype, "audioOutputMode");
    return VideoPlayer;
}(Component_js_1.Behaviour));
exports.VideoPlayer = VideoPlayer;
|
660
|
+
// Screenspace presentation helper: shows the most recently added video
// texture on a fullscreen quad added to the scene, with input handling
// (zoom/pan/escape) delegated to VideoOverlayInput.
var VideoOverlay = /** @class */ (function () {
    /**
     * @param context engine context providing scene and domElement access
     */
    function VideoOverlay(context) {
        // Registered textures; the last entry is the one displayed on start().
        this._videos = [];
        this._isInScreenspaceMode = false;
        this.context = context;
        this._input = new VideoOverlayInput(this);
    }
    Object.defineProperty(VideoOverlay.prototype, "enabled", {
        get: function () {
            return this._isInScreenspaceMode;
        },
        set: function (val) {
            if (val)
                this.start();
            else
                this.stop();
        },
        enumerable: false,
        configurable: true
    });
    // Register a texture as a display candidate (no duplicates).
    VideoOverlay.prototype.add = function (video) {
        if (this._videos.indexOf(video) === -1) {
            this._videos.push(video);
        }
    };
    VideoOverlay.prototype.remove = function (video) {
        if (!video)
            return;
        var index = this._videos.indexOf(video);
        if (index >= 0) {
            this._videos.splice(index, 1);
        }
    };
    // Enter screenspace mode: lazily create the fullscreen quad, add it to the
    // scene, and enable input handling on its material.
    VideoOverlay.prototype.start = function () {
        var _a;
        if (this._isInScreenspaceMode)
            return;
        // FIX: was `this._videos.length < 0`, which can never be true (array
        // length is non-negative) — the intended check is "no textures added".
        if (this._videos.length === 0)
            return;
        var texture = this._videos[this._videos.length - 1];
        if (!texture)
            return;
        this._isInScreenspaceMode = true;
        if (!this._screenspaceModeQuad) {
            this._screenspaceModeQuad = engine_create_objects_js_1.ObjectUtils.createPrimitive(engine_create_objects_js_1.PrimitiveType.Quad, {
                material: new ScreenspaceTexture(texture)
            });
            if (!this._screenspaceModeQuad)
                return;
            // Scale the unit quad up so it covers the view.
            this._screenspaceModeQuad.geometry.scale(2, 2, 2);
        }
        var quad = this._screenspaceModeQuad;
        this.context.scene.add(quad);
        this.updateScreenspaceMaterialUniforms();
        var mat = quad.material;
        mat === null || mat === void 0 ? void 0 : mat.reset();
        (_a = this._input) === null || _a === void 0 ? void 0 : _a.enable(mat);
    };
    // Leave screenspace mode and detach the quad from the scene.
    VideoOverlay.prototype.stop = function () {
        var _a;
        this._isInScreenspaceMode = false;
        if (this._screenspaceModeQuad) {
            (_a = this._input) === null || _a === void 0 ? void 0 : _a.disable();
            this._screenspaceModeQuad.removeFromParent();
        }
    };
    // Keep the overlay material's aspect uniform in sync with the canvas size.
    VideoOverlay.prototype.updateScreenspaceMaterialUniforms = function () {
        var _a;
        var mat = (_a = this._screenspaceModeQuad) === null || _a === void 0 ? void 0 : _a.material;
        if (!mat)
            return;
        // mat.videoAspect = this.videoTexture?.image?.width / this.videoTexture?.image?.height;
        mat.screenAspect = this.context.domElement.clientWidth / this.context.domElement.clientHeight;
    };
    return VideoOverlay;
}());
|
736
|
+
var VideoOverlayInput = /** @class */ (function () {
    /**
     * Handles user input for a screenspace VideoOverlay: mouse/touch panning,
     * wheel and pinch zooming, Escape / double-tap to exit.
     * All listeners are registered on `window` in enable() and removed in disable().
     * @param overlay the VideoOverlay this input controller belongs to
     */
    function VideoOverlayInput(overlay) {
        this._isPinching = false;
        this._lastPinch = 0;
        this.overlay = overlay;
        this.context = overlay.context;
    }
    /**
     * Registers all window event listeners and binds them to the given material.
     * @param mat the screenspace material whose zoom/offset are driven by input
     */
    VideoOverlayInput.prototype.enable = function (mat) {
        var _this = this;
        // FIX: guard against double-registration. Previously only resize/keyup/wheel
        // were ever removed, so repeated enable() calls stacked duplicate
        // mouse/pointer/touch listeners that were leaked forever.
        this.disable();
        this._material = mat;
        window.addEventListener("resize", this._onResizeScreenFn = function () {
            _this.overlay.updateScreenspaceMaterialUniforms();
        });
        window.addEventListener("keyup", this._onKeyUpFn = function (args) {
            if (args.key === "Escape")
                _this.overlay.stop();
        });
        // passive:false so preventDefault() can keep the page from scrolling while zooming
        window.addEventListener("wheel", this._onMouseWheelFn = function (args) {
            if (_this.overlay.enabled) {
                mat.zoom += args.deltaY * .0005;
                args.preventDefault();
            }
        }, { passive: false });
        var delta = new three_1.Vector2();
        // mouse drag pans the video (movement normalized to element size, scaled by zoom)
        window.addEventListener("mousemove", this._onMouseMoveFn = function (args) {
            if (_this.overlay.enabled && _this.context.input.getPointerPressed(0)) {
                var normalizedMovement = new three_1.Vector2(args.movementX, args.movementY);
                normalizedMovement.x /= _this.context.domElement.clientWidth;
                normalizedMovement.y /= _this.context.domElement.clientHeight;
                delta.set(normalizedMovement.x, normalizedMovement.y);
                delta.multiplyScalar(mat.zoom / -_this.context.time.deltaTime * .01);
                mat.offset = mat.offset.add(delta);
            }
        });
        // single-finger touch drag pans as well
        window.addEventListener("pointermove", this._onPointerMoveFn = function (args) {
            if (_this.overlay.enabled && _this.context.input.getPointerPressed(0)) {
                var count = _this.context.input.getTouchesPressedCount();
                if (count === 1) {
                    delta.set(args.movementX, args.movementY);
                    delta.multiplyScalar(mat.zoom * -_this.context.time.deltaTime * .05);
                    mat.offset = mat.offset.add(delta);
                }
            }
        });
        var lastTouchStartTime = 0;
        // double tap (two taps within .3s) exits the overlay; two fingers begin a pinch
        window.addEventListener("touchstart", this._onTouchStartFn = function (e) {
            if (e.touches.length < 2) {
                if (_this.context.time.time - lastTouchStartTime < .3) {
                    _this.overlay.stop();
                }
                lastTouchStartTime = _this.context.time.time;
                return;
            }
            _this._isPinching = true;
            _this._lastPinch = 0;
        });
        // pinch distance delta drives zoom
        window.addEventListener("touchmove", this._onTouchMoveFn = function (e) {
            if (!_this._isPinching || !_this._material)
                return;
            var touch1 = e.touches[0];
            var touch2 = e.touches[1];
            var dx = touch1.clientX - touch2.clientX;
            var dy = touch1.clientY - touch2.clientY;
            var distance = Math.sqrt(dx * dx + dy * dy);
            if (_this._lastPinch !== 0) {
                var pinchDelta = distance - _this._lastPinch;
                _this._material.zoom -= pinchDelta * .004;
            }
            _this._lastPinch = distance;
        });
        window.addEventListener("touchend", this._onTouchEndFn = function () {
            _this._isPinching = false;
        });
    };
    /** Removes every listener that enable() registered. Safe to call repeatedly. */
    VideoOverlayInput.prototype.disable = function () {
        if (this._onResizeScreenFn) {
            window.removeEventListener("resize", this._onResizeScreenFn);
            this._onResizeScreenFn = undefined;
        }
        if (this._onKeyUpFn) {
            window.removeEventListener("keyup", this._onKeyUpFn);
            this._onKeyUpFn = undefined;
        }
        if (this._onMouseWheelFn) {
            window.removeEventListener("wheel", this._onMouseWheelFn);
            this._onMouseWheelFn = undefined;
        }
        if (this._onMouseMoveFn) {
            window.removeEventListener("mousemove", this._onMouseMoveFn);
            this._onMouseMoveFn = undefined;
        }
        if (this._onPointerMoveFn) {
            window.removeEventListener("pointermove", this._onPointerMoveFn);
            this._onPointerMoveFn = undefined;
        }
        if (this._onTouchStartFn) {
            window.removeEventListener("touchstart", this._onTouchStartFn);
            this._onTouchStartFn = undefined;
        }
        if (this._onTouchMoveFn) {
            window.removeEventListener("touchmove", this._onTouchMoveFn);
            this._onTouchMoveFn = undefined;
        }
        if (this._onTouchEndFn) {
            window.removeEventListener("touchend", this._onTouchEndFn);
            this._onTouchEndFn = undefined;
        }
    };
    return VideoOverlayInput;
}());
|
826
|
+
// Fullscreen-quad ShaderMaterial that letterboxes a (video) texture to the
// screen aspect and supports pan/zoom via the packed "offsetScale" uniform
// (x/y = pan offset, z = zoom factor; w is presently unused — TODO confirm).
var ScreenspaceTexture = /** @class */ (function (_super) {
    __extends(ScreenspaceTexture, _super);
    // maxZoom : number = 10
    function ScreenspaceTexture(tex) {
        var _this = _super.call(this) || this;
        // scratch vector returned by the "offset" getter (avoids per-call allocation)
        _this._offset = new three_1.Vector2();
        _this.uniforms = {
            map: { value: tex },
            screenAspect: { value: 1 },
            offsetScale: { value: new three_1.Vector4(0, 0, 1, 1) }
        };
        // vertex shader: flips v, fits the texture into the screen by comparing
        // texture aspect (via textureSize) with screenAspect, then applies
        // zoom (offsetScale.z) and pan (offsetScale.xy) around the uv center
        _this.vertexShader = "\n uniform sampler2D map;\n uniform float screenAspect;\n uniform vec4 offsetScale;\n varying vec2 vUv;\n\n void main() {\n\n gl_Position = vec4( position , 1.0 );\n vUv = uv;\n vUv.y = 1. - vUv.y;\n\n // fit into screen\n ivec2 res = textureSize(map, 0);\n float videoAspect = float(res.x) / float(res.y);\n float aspect = videoAspect / screenAspect;\n if(aspect >= 1.0) \n {\n vUv.y = vUv.y * aspect;\n float offset = (1. - aspect) * .5;\n vUv.y = vUv.y + offset;\n }\n else\n {\n vUv.x = vUv.x / aspect;\n float offset = (1. - 1. / aspect) * .5;\n vUv.x = vUv.x + offset;\n }\n\n vUv.x -= .5;\n vUv.y -= .5;\n\n vUv.x *= offsetScale.z;\n vUv.y *= offsetScale.z;\n vUv.x += offsetScale.x;\n vUv.y += offsetScale.y;\n\n vUv.x += .5;\n vUv.y += .5;\n }\n\n ";
        // fragment shader: black outside the [0,1] uv range (letterbox bars),
        // otherwise sample the texture and convert linear -> sRGB
        _this.fragmentShader = "\n uniform sampler2D map;\n varying vec2 vUv;\n void main() {\n if(vUv.x < 0. || vUv.x > 1. || vUv.y < 0. || vUv.y > 1.)\n gl_FragColor = vec4(0., 0., 0., 1.);\n else\n {\n vec4 texcolor = texture2D(map, vUv);\n texcolor = LinearTosRGB(texcolor);\n gl_FragColor = texcolor;\n }\n }\n ";
        return _this;
    }
    // screen width/height ratio; set on resize by the overlay's uniform update
    Object.defineProperty(ScreenspaceTexture.prototype, "screenAspect", {
        set: function (val) {
            this.uniforms["screenAspect"].value = val;
            this.needsUpdate = true;
        },
        enumerable: false,
        configurable: true
    });
    // pan offset, stored in offsetScale.xy; the getter reuses one scratch Vector2
    Object.defineProperty(ScreenspaceTexture.prototype, "offset", {
        get: function () {
            var val = this.uniforms["offsetScale"].value;
            this._offset.set(val.x, val.y);
            return this._offset;
        },
        set: function (vec) {
            var val = this.uniforms["offsetScale"].value;
            val.x = vec.x;
            val.y = vec.y;
            // console.log(val);
            this.uniforms["offsetScale"].value = val;
            this.needsUpdate = true;
        },
        enumerable: false,
        configurable: true
    });
    // zoom factor, stored in offsetScale.z and clamped to a minimum of .001
    Object.defineProperty(ScreenspaceTexture.prototype, "zoom", {
        get: function () {
            return this.uniforms["offsetScale"].value.z; // * this.maxZoom;
        },
        set: function (val) {
            var zoom = this.uniforms["offsetScale"].value;
            if (val < .001)
                val = .001;
            zoom.z = val;
            // zoom.z = this.maxZoom - val;
            // zoom.z /= this.maxZoom;
            this.needsUpdate = true;
        },
        enumerable: false,
        configurable: true
    });
    // restores default pan (0,0) and zoom (1)
    ScreenspaceTexture.prototype.reset = function () {
        this.offset = this.offset.set(0, 0);
        this.zoom = 1;
        this.needsUpdate = true;
    };
    return ScreenspaceTexture;
}(three_1.ShaderMaterial));
|
@@ -0,0 +1,46 @@
|
|
1
|
+
"use strict";
|
2
|
+
// Compiler-emitted (tslib-style) inheritance helper: wires up `d extends b`
// by copying static members and chaining prototypes. Reuses an existing
// this.__extends if one was already defined in this scope.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        // pick the best available mechanism once, then memoize it
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        extendStatics(d, b);
        // temporary constructor trick: link d.prototype to b.prototype
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
|
15
|
+
// Compiler-emitted (tslib-style) decorator helper: applies `decorators`
// (class, property or method decorators depending on argument count) and
// defines the resulting descriptor when one is produced.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    // prefer the Reflect metadata API when available
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
|
21
|
+
exports.__esModule = true;
|
22
|
+
exports.Voip2 = void 0;
|
23
|
+
var Component_js_1 = require("./Component.js");
|
24
|
+
var engine_networking_streams_js_1 = require("../engine/engine_networking_streams.js");
|
25
|
+
// FIX: this compiled module referenced `serializable` without requiring it,
// which would throw a ReferenceError while the module is evaluated (the
// TypeScript source imports it from engine_serialization_decorator).
var engine_serialization_decorator_js_1 = require("../engine/engine_serialization_decorator.js");
var Voip2 = /** @class */ (function (_super) {
    __extends(Voip2, _super);
    /**
     * Minimal VoIP behaviour: owns a NetworkedStreams instance whose
     * lifetime follows the component's enabled state.
     */
    function Voip2() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // serialized flag; presumably controls auto-connect behavior — TODO confirm against callers
        _this.connectOnEnable = false;
        return _this;
    }
    Voip2.prototype.onEnable = function () {
        // lazily create the networked streams handle once, then (re)enable it
        if (!this._net)
            this._net = engine_networking_streams_js_1.NetworkedStreams.create(this);
        this._net.enable();
    };
    Voip2.prototype.onDisable = function () {
        var _a;
        (_a = this._net) === null || _a === void 0 ? void 0 : _a.disable();
    };
    __decorate([
        engine_serialization_decorator_js_1.serializable()
    ], Voip2.prototype, "connectOnEnable");
    return Voip2;
}(Component_js_1.Behaviour));
|
46
|
+
exports.Voip2 = Voip2;
|
@@ -0,0 +1,489 @@
|
|
1
|
+
import { type Context } from "./engine_context.js";
|
2
|
+
import Peer, { MediaConnection } from "peerjs"
|
3
|
+
import { RoomEvents } from "../engine/engine_networking.js";
|
4
|
+
import { UserJoinedOrLeftRoomModel } from "../engine/engine_networking.js";
|
5
|
+
import type { IModel } from "./engine_networking_types.js";
|
6
|
+
import { getPeerjsInstance } from "../engine/engine_networking_peer.js";
|
7
|
+
import { EventDispatcher } from "three";
|
8
|
+
import { getParam } from "./engine_utils.js";
|
9
|
+
import { type IComponent } from "./engine_types.js";
|
10
|
+
|
11
|
+
|
12
|
+
|
13
|
+
const debug = getParam("debugnetworkingstreams");
|
14
|
+
|
15
|
+
/** Event names used both on the networking connection and as EventDispatcher event types. */
export enum NetworkedStreamEvents {
    /** sent to the room when the local peerjs connection opened (see PeerHandle.onPeerConnect) */
    Connected = "peer-user-connected",
    /** dispatched when a remote MediaStream arrived on an incoming call */
    StreamReceived = "receive-stream",
    /** dispatched when a call (and its stream) ended */
    StreamEnded = "call-ended",
    /** declared for consumers; not raised anywhere in this file — TODO confirm usage */
    Disconnected = "peer-user-disconnected",
    /** declared for consumers; not raised anywhere in this file — TODO confirm usage */
    UserJoined = "user-joined",
}
|
22
|
+
|
23
|
+
export class StreamEndedEvent {
|
24
|
+
readonly type = NetworkedStreamEvents.StreamEnded;
|
25
|
+
readonly userId: string;
|
26
|
+
readonly direction: CallDirection;
|
27
|
+
constructor(userId: string, direction: CallDirection) {
|
28
|
+
this.userId = userId;
|
29
|
+
this.direction = direction;
|
30
|
+
}
|
31
|
+
}
|
32
|
+
export class StreamReceivedEvent {
|
33
|
+
readonly type = NetworkedStreamEvents.StreamReceived;
|
34
|
+
readonly stream: MediaStream;
|
35
|
+
readonly target: CallHandle;
|
36
|
+
constructor(stream: MediaStream, target: CallHandle) {
|
37
|
+
this.stream = stream
|
38
|
+
this.target = target;
|
39
|
+
}
|
40
|
+
}
|
41
|
+
|
42
|
+
class PeerUserConnectedModel implements IModel {
|
43
|
+
/** the peer handle id */
|
44
|
+
readonly guid: string;
|
45
|
+
readonly peerId: string;
|
46
|
+
// internal so server doesnt save it to persistent storage
|
47
|
+
readonly dontSave: boolean = true;
|
48
|
+
constructor(handle: PeerHandle, peerId: string) {
|
49
|
+
this.guid = handle.id;
|
50
|
+
this.peerId = peerId;
|
51
|
+
}
|
52
|
+
}
|
53
|
+
|
54
|
+
/** Direction of a media call relative to the local user. */
export enum CallDirection {
    /** a remote user called us (we answered) */
    Incoming = "incoming",
    /** we initiated the call */
    Outgoing = "outgoing",
}
|
58
|
+
|
59
|
+
/**
 * Wraps one peerjs MediaConnection (a single call to/from one user) and
 * re-dispatches its lifecycle as typed events (StreamReceivedEvent /
 * StreamEndedEvent).
 */
class CallHandle extends EventDispatcher {
    /** connection id of the remote user this call belongs to */
    readonly userId: string;
    /** whether we answered (incoming) or initiated (outgoing) the call */
    readonly direction: CallDirection;
    /** the underlying peerjs media connection */
    readonly call: MediaConnection;
    /** the remote MediaStream once received, otherwise null */
    get stream() { return this._stream; };

    private _stream: MediaStream | null = null;
    private _isDisposed: boolean = false;

    /**
     * Closes the underlying connection and stops all tracks of the received
     * stream. Idempotent. Note: the StreamEndedEvent is dispatched by the
     * connection's own "close" callback (see constructor), not directly here.
     */
    close() {
        if (this._isDisposed) return;
        this._isDisposed = true;
        this.call.close();
        disposeStream(this._stream);
    }

    /** true while the underlying RTCPeerConnection reports "connected" */
    get isOpen() {
        return this.call.peerConnection?.connectionState === "connected";// && this._stream?.active;
    }

    /** true while the underlying RTCPeerConnection is still negotiating */
    get isOpening() {
        return this.call.peerConnection?.connectionState === "connecting";
    }

    /** inverse of isOpen — also true while the connection is still being established */
    get isClosed() {
        return !this.isOpen;
    }

    constructor(userId: string, call: MediaConnection, direction: CallDirection) {
        super();
        this.userId = userId;
        this.call = call;
        this.direction = direction;
        this._stream = null;
        call.on("stream", stream => {
            if (debug)
                console.log("Receive video", stream.getAudioTracks(), stream.getVideoTracks());
            this._stream = stream;
            // only incoming calls raise StreamReceived locally; for outgoing
            // calls the remote side receives the stream and raises it there
            if (direction === CallDirection.Incoming) {
                const args: StreamReceivedEvent = new StreamReceivedEvent(stream, this);
                this.dispatchEvent(args);
            }
        });
        call.on("close", () => {
            this.dispatchEvent(new StreamEndedEvent(userId, direction));
        })
    }
}
|
107
|
+
|
108
|
+
export class PeerHandle extends EventDispatcher {
|
109
|
+
|
110
|
+
private static readonly instances: Map<string, PeerHandle> = new Map();
|
111
|
+
|
112
|
+
static getOrCreate(context: Context, guid: string): PeerHandle {
|
113
|
+
// if (id === undefined) {
|
114
|
+
// // randomId
|
115
|
+
// id = Math.random().toFixed(5);
|
116
|
+
// }
|
117
|
+
if (PeerHandle.instances.has(guid))
|
118
|
+
return PeerHandle.instances.get(guid)!;
|
119
|
+
const peer = new PeerHandle(context, guid);
|
120
|
+
PeerHandle.instances.set(guid, peer);
|
121
|
+
return peer;
|
122
|
+
}
|
123
|
+
|
124
|
+
getMyPeerId(): string | undefined {
|
125
|
+
if (this.context.connection.connectionId)
|
126
|
+
return this.getPeerIdFromUserId(this.context.connection.connectionId);
|
127
|
+
return undefined;
|
128
|
+
}
|
129
|
+
|
130
|
+
getPeerIdFromUserId(userConnectionId: string): string {
|
131
|
+
// we build the peer id ourselves so we dont need to wait for peer to report it
|
132
|
+
return this.id + "-" + userConnectionId;
|
133
|
+
}
|
134
|
+
|
135
|
+
getUserIdFromPeerId(peerId: string): string {
|
136
|
+
return peerId.substring(this.id.length + 1);
|
137
|
+
}
|
138
|
+
|
139
|
+
makeCall(peerId: string, stream: MediaStream): CallHandle | undefined {
|
140
|
+
const opts = { metadata: { userId: this.context.connection.connectionId } };
|
141
|
+
const call = this._peer?.call(peerId, stream, opts);
|
142
|
+
if (call)
|
143
|
+
return this.registerCall(call, CallDirection.Outgoing);
|
144
|
+
return undefined;
|
145
|
+
}
|
146
|
+
|
147
|
+
get peer(): Peer | undefined { return this._peer; }
|
148
|
+
|
149
|
+
readonly id: string;
|
150
|
+
readonly context: Context;
|
151
|
+
private _peer: Peer | undefined;
|
152
|
+
private _incomingCalls: CallHandle[] = [];
|
153
|
+
private _outgoingCalls: CallHandle[] = [];
|
154
|
+
|
155
|
+
private constructor(context: Context, id: string) {
|
156
|
+
super();
|
157
|
+
this.context = context;
|
158
|
+
this.id = id;
|
159
|
+
this.setupPeer();
|
160
|
+
navigator["getUserMedia"] = (
|
161
|
+
navigator["getUserMedia"] || navigator["webkitGetUserMedia"] ||
|
162
|
+
navigator["mozGetUserMedia"] || navigator["msGetUserMedia"]
|
163
|
+
);
|
164
|
+
}
|
165
|
+
|
166
|
+
private _enabled: boolean = false;
|
167
|
+
private _enabledPeer: boolean = false;
|
168
|
+
private onConnectRoomFn: Function = this.onConnectRoom.bind(this);
|
169
|
+
// private onUserJoinedOrLeftRoomFn: Function = this.onUserJoinedOrLeftRoom.bind(this);
|
170
|
+
private onPeerConnectFn: (id) => void = this.onPeerConnect.bind(this);
|
171
|
+
private onPeerReceiveCallFn: (call) => void = this.onPeerReceivingCall.bind(this);
|
172
|
+
// private _connectionPeerIdMap : Map<string, string> = new Map();
|
173
|
+
|
174
|
+
enable() {
|
175
|
+
if (this._enabled) return;
|
176
|
+
this._enabled = true;
|
177
|
+
this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onConnectRoomFn);
|
178
|
+
// this.context.connection.beginListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
|
179
|
+
// this.context.connection.beginListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
|
180
|
+
this.subscribePeerEvents();
|
181
|
+
}
|
182
|
+
|
183
|
+
disable() {
|
184
|
+
if (!this._enabled) return;
|
185
|
+
this._enabled = false;
|
186
|
+
this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onConnectRoomFn);
|
187
|
+
// this.context.connection.stopListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
|
188
|
+
// this.context.connection.stopListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
|
189
|
+
this.unsubscribePeerEvents();
|
190
|
+
}
|
191
|
+
|
192
|
+
private onConnectRoom(): void {
|
193
|
+
this.setupPeer();
|
194
|
+
};
|
195
|
+
|
196
|
+
// private onUserJoinedOrLeftRoom(_: UserJoinedOrLeftRoomModel): void {
|
197
|
+
// };
|
198
|
+
|
199
|
+
private setupPeer() {
|
200
|
+
if (!this.context.connection.connectionId) return;
|
201
|
+
if (this._enabledPeer) return;
|
202
|
+
this._enabledPeer = true;
|
203
|
+
if (!this._peer) {
|
204
|
+
const peerId = this.getMyPeerId();
|
205
|
+
if (peerId)
|
206
|
+
this._peer = getPeerjsInstance(peerId);
|
207
|
+
else console.error("Failed to setup peerjs because we dont have a connection id", this.context.connection.connectionId);
|
208
|
+
}
|
209
|
+
if (this._enabled)
|
210
|
+
this.subscribePeerEvents();
|
211
|
+
}
|
212
|
+
|
213
|
+
private subscribePeerEvents() {
|
214
|
+
if (!this._peer) return;
|
215
|
+
this._peer.on("open", this.onPeerConnectFn);
|
216
|
+
this._peer.on("call", this.onPeerReceiveCallFn);
|
217
|
+
// this.context.connection.beginListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
|
218
|
+
// TODO: make connection to all current active calls even if the user is not anymore in the needle room
|
219
|
+
}
|
220
|
+
|
221
|
+
private unsubscribePeerEvents() {
|
222
|
+
if (!this._peer) return;
|
223
|
+
this._peer.off("open", this.onPeerConnectFn);
|
224
|
+
this._peer.off("call", this.onPeerReceiveCallFn);
|
225
|
+
// this.context.connection.stopListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
|
226
|
+
}
|
227
|
+
|
228
|
+
private onPeerConnect(id): void {
|
229
|
+
if (debug)
|
230
|
+
console.log("Peer connected as", id);
|
231
|
+
this.context.connection.send(NetworkedStreamEvents.Connected, new PeerUserConnectedModel(this, id));
|
232
|
+
}
|
233
|
+
|
234
|
+
private onPeerReceivingCall(call: MediaConnection): void {
|
235
|
+
call.answer();
|
236
|
+
this.registerCall(call, CallDirection.Incoming);
|
237
|
+
}
|
238
|
+
|
239
|
+
private registerCall(call: MediaConnection, direction: CallDirection): CallHandle {
|
240
|
+
|
241
|
+
const meta = call.metadata;
|
242
|
+
if (!meta || !meta.userId) {
|
243
|
+
console.error("Missing call metadata", call);
|
244
|
+
}
|
245
|
+
const userId = meta.userId;
|
246
|
+
|
247
|
+
if (direction === CallDirection.Incoming && debug) console.log("Receive call from", call.metadata);
|
248
|
+
else if (debug) console.log("Make call to", call.metadata);
|
249
|
+
|
250
|
+
const arr = direction === CallDirection.Incoming ? this._incomingCalls : this._outgoingCalls;
|
251
|
+
const handle = new CallHandle(userId, call, direction);
|
252
|
+
arr.push(handle);
|
253
|
+
call.on("error", err => {
|
254
|
+
console.error("Call error", err);
|
255
|
+
});
|
256
|
+
call.on("close", () => {
|
257
|
+
if (debug)
|
258
|
+
console.log("Call ended", call.metadata);
|
259
|
+
call.close();
|
260
|
+
const index = arr.indexOf(handle);
|
261
|
+
if (index !== -1)
|
262
|
+
arr.splice(index, 1);
|
263
|
+
});
|
264
|
+
|
265
|
+
handle.addEventListener(NetworkedStreamEvents.StreamEnded, e => {
|
266
|
+
this.dispatchEvent(e);
|
267
|
+
});
|
268
|
+
|
269
|
+
if (direction === CallDirection.Incoming) {
|
270
|
+
|
271
|
+
handle.addEventListener(NetworkedStreamEvents.StreamReceived, e => {
|
272
|
+
this.dispatchEvent(e);
|
273
|
+
});
|
274
|
+
|
275
|
+
call.on("stream", () => {
|
276
|
+
// workaround for https://github.com/peers/peerjs/issues/636
|
277
|
+
let intervalCounter = 0;
|
278
|
+
const closeInterval = setInterval(() => {
|
279
|
+
const isFirstInterval = intervalCounter === 0;
|
280
|
+
if (!handle.isOpen && isFirstInterval) {
|
281
|
+
intervalCounter += 1;
|
282
|
+
clearInterval(closeInterval);
|
283
|
+
handle.close();
|
284
|
+
}
|
285
|
+
}, 2000);
|
286
|
+
});
|
287
|
+
}
|
288
|
+
return handle;
|
289
|
+
}
|
290
|
+
|
291
|
+
// private onRemotePeerConnect(user: PeerUserConnectedModel) {
|
292
|
+
// console.log("other user connected", user);
|
293
|
+
// }
|
294
|
+
}
|
295
|
+
|
296
|
+
|
297
|
+
// type UserVideoCall = {
|
298
|
+
// call: Peer.MediaConnection;
|
299
|
+
// stream: MediaStream;
|
300
|
+
// userId: string;
|
301
|
+
// }
|
302
|
+
|
303
|
+
// type IncomingStreamArgs = {
|
304
|
+
// stream: MediaStream;
|
305
|
+
// userId: string;
|
306
|
+
// }
|
307
|
+
|
308
|
+
export class NetworkedStreams extends EventDispatcher {
|
309
|
+
|
310
|
+
static create(comp: IComponent) {
|
311
|
+
const peer = PeerHandle.getOrCreate(comp.context, comp.context.connection.connectionId!);
|
312
|
+
return new NetworkedStreams(comp.context, peer);
|
313
|
+
}
|
314
|
+
|
315
|
+
private readonly context: Context;
|
316
|
+
private readonly peer: PeerHandle;
|
317
|
+
|
318
|
+
// private _receiveVideoStreamListeners: Array<(info: IncomingStreamArgs) => void> = [];
|
319
|
+
private _sendingStreams: Map<MediaStream, CallHandle[]> = new Map();
|
320
|
+
|
321
|
+
debug: boolean = false;
|
322
|
+
|
323
|
+
constructor(context: Context, peer: PeerHandle) {
|
324
|
+
super();
|
325
|
+
this.context = context;
|
326
|
+
this.peer = peer;
|
327
|
+
if (debug) this.debug = true;
|
328
|
+
}
|
329
|
+
|
330
|
+
startSendingStream(stream: MediaStream) {
|
331
|
+
if (!this._sendingStreams.has(stream)) {
|
332
|
+
this._sendingStreams.set(stream, []);
|
333
|
+
this.updateSendingCalls();
|
334
|
+
}
|
335
|
+
else {
|
336
|
+
console.warn("Received start sending stream with stream that is already being sent");
|
337
|
+
}
|
338
|
+
}
|
339
|
+
|
340
|
+
stopSendingStream(_steam: MediaStream | undefined | null) {
|
341
|
+
if (_steam) {
|
342
|
+
const calls = this._sendingStreams.get(_steam);
|
343
|
+
if (calls) {
|
344
|
+
if (this.debug)
|
345
|
+
console.log("Closing calls", calls);
|
346
|
+
for (const call of calls) {
|
347
|
+
call.close();
|
348
|
+
}
|
349
|
+
}
|
350
|
+
this._sendingStreams.delete(_steam);
|
351
|
+
if (calls && this.debug)
|
352
|
+
console.log("Currently sending", this._sendingStreams);
|
353
|
+
}
|
354
|
+
}
|
355
|
+
|
356
|
+
// private onConnectRoomFn: Function = this.onConnectRoom.bind(this);
|
357
|
+
// private onUserConnectedFn: Function = this.onUserConnected.bind(this);
|
358
|
+
// private onUserLeftFn: Function = this.onUserLeft.bind(this);
|
359
|
+
|
360
|
+
enable() {
|
361
|
+
this.peer.enable();
|
362
|
+
this.peer.addEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
|
363
|
+
//@ts-ignore
|
364
|
+
this.peer.addEventListener(NetworkedStreamEvents.StreamEnded, this.onCallEnded);
|
365
|
+
// this.peer.addEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
|
366
|
+
this.context.connection.beginListen(NetworkedStreamEvents.Connected, this.onUserConnected);
|
367
|
+
this.context.connection.beginListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
368
|
+
this.context.connection.beginListen(RoomEvents.UserJoinedRoom, this.onJoinedRoom);
|
369
|
+
this.context.connection.beginListen(RoomEvents.UserLeftRoom, this.onUserLeft);
|
370
|
+
}
|
371
|
+
|
372
|
+
disable() {
|
373
|
+
this.peer.disable();
|
374
|
+
this.peer.removeEventListener(NetworkedStreamEvents.StreamReceived, this.onReceiveStream);
|
375
|
+
//@ts-ignore
|
376
|
+
this.peer.removeEventListener(NetworkedStreamEvents.StreamEnded, this.onCallEnded);
|
377
|
+
// this.peer.removeEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
|
378
|
+
this.context.connection.stopListen(NetworkedStreamEvents.Connected, this.onUserConnected);
|
379
|
+
this.context.connection.stopListen(RoomEvents.JoinedRoom, this.onJoinedRoom);
|
380
|
+
this.context.connection.stopListen(RoomEvents.UserJoinedRoom, this.onJoinedRoom);
|
381
|
+
this.context.connection.stopListen(RoomEvents.UserLeftRoom, this.onUserLeft);
|
382
|
+
}
|
383
|
+
|
384
|
+
// private onUserJoinedPeer = (evt) => {
|
385
|
+
// if (!this.context.connection.isConnected && evt.userId) {
|
386
|
+
// this.startCallWithUserIfNotAlready(evt.userId);
|
387
|
+
// }
|
388
|
+
// }
|
389
|
+
|
390
|
+
// When either we ourselves OR someone else is joining the room we want to make sure to re-establish all calls
|
391
|
+
// and if the user that joined is not yet receiving our video stream we want to start a stream with them
|
392
|
+
// https://github.com/needle-tools/needle-tiny/issues/697#issuecomment-1510425539
|
393
|
+
private onJoinedRoom = (evt) => {
|
394
|
+
if (this.debug) console.log(`${evt.userId} joined room and I'm currently sending ${this._sendingStreams.size} streams`);
|
395
|
+
if (this._sendingStreams.size > 0)
|
396
|
+
this.updateSendingCalls();
|
397
|
+
}
|
398
|
+
|
399
|
+
private onReceiveStream = (evt) => {
|
400
|
+
if (this.debug)
|
401
|
+
console.log("RECEIVE STREAM", evt);
|
402
|
+
this.dispatchEvent({ type: NetworkedStreamEvents.StreamReceived, target: this, stream: evt.stream, userId: evt.userId });
|
403
|
+
}
|
404
|
+
|
405
|
+
private onCallEnded = (evt: StreamEndedEvent) => {
|
406
|
+
this.dispatchEvent(evt)
|
407
|
+
}
|
408
|
+
|
409
|
+
private onUserConnected = (user: PeerUserConnectedModel) => {
|
410
|
+
// console.log(this.peer.id, user.guid)
|
411
|
+
if (this.peer.id === user.guid) {
|
412
|
+
if (this.debug)
|
413
|
+
console.log("USER CONNECTED", user.guid, user);
|
414
|
+
const stream = this._sendingStreams.keys().next().value;
|
415
|
+
this.peer.makeCall(user.peerId, stream);
|
416
|
+
}
|
417
|
+
}
|
418
|
+
|
419
|
+
private onUserLeft(_: UserJoinedOrLeftRoomModel) {
|
420
|
+
this.stopCallsToUsersThatAreNotInTheRoomAnymore();
|
421
|
+
}
|
422
|
+
|
423
|
+
private updateSendingCalls() {
|
424
|
+
let startedNewCall = false;
|
425
|
+
const localUserId = this.context.connection.connectionId;
|
426
|
+
for (const stream of this._sendingStreams.keys()) {
|
427
|
+
const calls = this._sendingStreams.get(stream) || [];
|
428
|
+
for (const userId of this.context.connection.usersInRoom()) {
|
429
|
+
if (userId === localUserId) continue;
|
430
|
+
const existing = calls.find(c => c.userId === userId);
|
431
|
+
if (!existing || existing.stream?.active === false) {
|
432
|
+
if (this.debug)
|
433
|
+
console.log("Starting call to", userId, localUserId)
|
434
|
+
const handle = this.peer.makeCall(this.peer.getPeerIdFromUserId(userId), stream);
|
435
|
+
if (handle) {
|
436
|
+
startedNewCall = true;
|
437
|
+
calls.push(handle);
|
438
|
+
}
|
439
|
+
}
|
440
|
+
}
|
441
|
+
|
442
|
+
this._sendingStreams.set(stream, calls);
|
443
|
+
}
|
444
|
+
this.stopCallsToUsersThatAreNotInTheRoomAnymore();
|
445
|
+
if (startedNewCall && this.debug) {
|
446
|
+
console.log("Currently sending", this._sendingStreams);
|
447
|
+
}
|
448
|
+
}
|
449
|
+
|
450
|
+
// private startCallWithUserIfNotAlready(userId: string) {
|
451
|
+
// for (const stream of this._sendingVideoStreams.keys()) {
|
452
|
+
// const calls = this._sendingVideoStreams.get(stream) || [];
|
453
|
+
// const existing = calls.find(c => c.userId === userId);
|
454
|
+
// if (!existing || existing.stream?.active === false) {
|
455
|
+
// if (this.debug) console.log("Starting call to", userId)
|
456
|
+
// const handle = this.peer.makeCall(this.peer.getPeerIdFromUserId(userId), stream);
|
457
|
+
// if (handle) {
|
458
|
+
// calls.push(handle);
|
459
|
+
// return true;
|
460
|
+
// }
|
461
|
+
// }
|
462
|
+
// }
|
463
|
+
// return false;
|
464
|
+
// }
|
465
|
+
|
466
|
+
private stopCallsToUsersThatAreNotInTheRoomAnymore() {
|
467
|
+
for (const stream of this._sendingStreams.keys()) {
|
468
|
+
const calls = this._sendingStreams.get(stream);
|
469
|
+
if (!calls) continue;
|
470
|
+
for (let i = calls.length - 1; i >= 0; i--) {
|
471
|
+
const call = calls[i];
|
472
|
+
if (!this.context.connection.userIsInRoom(call.userId)) {
|
473
|
+
call.close();
|
474
|
+
calls.splice(i, 1);
|
475
|
+
}
|
476
|
+
}
|
477
|
+
}
|
478
|
+
}
|
479
|
+
|
480
|
+
// const call = peer.call(peerId, stream);
|
481
|
+
}
|
482
|
+
|
483
|
+
export function disposeStream(str: MediaStream | null | undefined) {
|
484
|
+
if (!str) return;
|
485
|
+
if (str instanceof MediaStream) {
|
486
|
+
for (const cap of str.getTracks())
|
487
|
+
cap.stop();
|
488
|
+
}
|
489
|
+
}
|