Needle Engine

Changes between version 3.27.5-beta and 3.28.0-beta
Files changed (35) hide show
  1. src/engine-schemes/flatc.exe +0 -0
  2. src/engine-schemes/dist/api.js +0 -17
  3. src/engine/dist/api.js +0 -73
  4. src/engine-components/dist/AudioSource.js +0 -513
  5. src/engine/dist/engine_networking_streams.js +0 -474
  6. src/engine-schemes/dist/schemes.js +0 -25
  7. src/engine-components/dist/ScreenCapture.js +0 -490
  8. src/engine-schemes/dist/synced-camera-model.js +0 -74
  9. src/engine-schemes/dist/synced-transform-model.js +0 -73
  10. src/engine-schemes/dist/transform.js +0 -46
  11. src/engine-schemes/dist/vec2.js +0 -32
  12. src/engine-schemes/dist/vec3.js +0 -36
  13. src/engine-schemes/dist/vec4.js +0 -40
  14. src/engine-components/dist/VideoPlayer.js +0 -888
  15. src/engine-components/dist/Voip2.js +0 -46
  16. src/engine-schemes/dist/vr-user-state-buffer.js +0 -110
  17. src/engine-schemes/dist/api.js.meta +0 -7
  18. src/engine/dist/api.js.meta +0 -7
  19. src/engine-components/dist/AudioSource.js.meta +0 -7
  20. src/engine/dist/engine_networking_streams.js.meta +0 -7
  21. src/engine-schemes/dist/schemes.js.meta +0 -7
  22. src/engine-components/dist/ScreenCapture.js.meta +0 -7
  23. src/engine-schemes/dist/synced-camera-model.js.meta +0 -7
  24. src/engine-schemes/dist/synced-transform-model.js.meta +0 -7
  25. src/engine-schemes/dist/transform.js.meta +0 -7
  26. src/engine-schemes/dist/vec2.js.meta +0 -7
  27. src/engine-schemes/dist/vec3.js.meta +0 -7
  28. src/engine-schemes/dist/vec4.js.meta +0 -7
  29. src/engine-components/dist/VideoPlayer.js.meta +0 -7
  30. src/engine-components/dist/Voip2.js.meta +0 -7
  31. src/engine-schemes/dist/vr-user-state-buffer.js.meta +0 -7
  32. src/engine/engine_shims.ts +0 -2
  33. src/engine/codegen/register_types.ts +2 -2
  34. src/engine-components/Renderer.ts +13 -0
  35. src/engine-components/export/usdz/ThreeUSDZExporter.ts +36 -24
src/engine-schemes/flatc.exe DELETED
File without changes
src/engine-schemes/dist/api.js DELETED
@@ -1,17 +0,0 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- var desc = Object.getOwnPropertyDescriptor(m, k);
5
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
- desc = { enumerable: true, get: function() { return m[k]; } };
7
- }
8
- Object.defineProperty(o, k2, desc);
9
- }) : (function(o, m, k, k2) {
10
- if (k2 === undefined) k2 = k;
11
- o[k2] = m[k];
12
- }));
13
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
- };
16
- exports.__esModule = true;
17
- __exportStar(require("./schemes.js"), exports);
src/engine/dist/api.js DELETED
@@ -1,73 +0,0 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
5
- }) : (function(o, m, k, k2) {
6
- if (k2 === undefined) k2 = k;
7
- o[k2] = m[k];
8
- }));
9
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
10
- for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);
11
- };
12
- exports.__esModule = true;
13
- __exportStar(require("./extensions/index.js"), exports);
14
- __exportStar(require("./engine_addressables.js"), exports);
15
- __exportStar(require("./engine_application.js"), exports);
16
- __exportStar(require("./engine_assetdatabase.js"), exports);
17
- __exportStar(require("./engine_create_objects.js"), exports);
18
- __exportStar(require("./engine_components_internal.js"), exports);
19
- __exportStar(require("./engine_components.js"), exports);
20
- __exportStar(require("./engine_components_internal.js"), exports);
21
- __exportStar(require("./engine_context_registry.js"), exports);
22
- __exportStar(require("./engine_context.js"), exports);
23
- __exportStar(require("./engine_coroutine.js"), exports);
24
- __exportStar(require("./engine_constants.js"), exports);
25
- __exportStar(require("./debug/index.js"), exports);
26
- __exportStar(require("./engine_element.js"), exports);
27
- __exportStar(require("./engine_element_loading.js"), exports);
28
- __exportStar(require("./engine_element_attributes.js"), exports);
29
- var engine_gizmos_js_1 = require("./engine_gizmos.js");
30
- __createBinding(exports, engine_gizmos_js_1, "Gizmos");
31
- __exportStar(require("./engine_gltf.js"), exports);
32
- __exportStar(require("./engine_hot_reload.js"), exports);
33
- __exportStar(require("./engine_gameobject.js"), exports);
34
- __exportStar(require("./engine_networking.js"), exports);
35
- __exportStar(require("./engine_networking_types.js"), exports);
36
- var engine_networking_auto_js_1 = require("./engine_networking_auto.js");
37
- __createBinding(exports, engine_networking_auto_js_1, "syncField");
38
- __exportStar(require("./engine_networking_files.js"), exports);
39
- __exportStar(require("./engine_networking_instantiate.js"), exports);
40
- __exportStar(require("./engine_networking_streams.js"), exports);
41
- __exportStar(require("./engine_networking_utils.js"), exports);
42
- __exportStar(require("./engine_networking_peer.js"), exports);
43
- __exportStar(require("./engine_patcher.js"), exports);
44
- __exportStar(require("./engine_playerview.js"), exports);
45
- __exportStar(require("./engine_physics.js"), exports);
46
- __exportStar(require("./engine_physics.types.js"), exports);
47
- __exportStar(require("./engine_physics_rapier.js"), exports);
48
- __exportStar(require("./engine_scenelighting.js"), exports);
49
- __exportStar(require("./engine_input.js"), exports);
50
- __exportStar(require("./engine_math.js"), exports);
51
- __exportStar(require("./js-extensions/index.js"), exports);
52
- __exportStar(require("./engine_scenetools.js"), exports);
53
- __exportStar(require("./engine_serialization.js"), exports);
54
- var engine_serialization_core_js_1 = require("./engine_serialization_core.js");
55
- __createBinding(exports, engine_serialization_core_js_1, "type");
56
- __exportStar(require("./engine_texture.js"), exports);
57
- __exportStar(require("./engine_three_utils.js"), exports);
58
- __exportStar(require("./engine_time.js"), exports);
59
- __exportStar(require("./engine_types.js"), exports);
60
- __exportStar(require("./engine_utils_screenshot.js"), exports);
61
- __exportStar(require("./engine_web_api.js"), exports);
62
- __exportStar(require("./engine_utils.js"), exports);
63
- var engine_typestore_js_1 = require("./engine_typestore.js");
64
- __createBinding(exports, engine_typestore_js_1, "TypeStore");
65
- __createBinding(exports, engine_typestore_js_1, "registerType");
66
- var engine_instancing_js_1 = require("./engine_instancing.js");
67
- __createBinding(exports, engine_instancing_js_1, "InstancingUtil");
68
- var engine_util_decorator_js_1 = require("./engine_util_decorator.js");
69
- __createBinding(exports, engine_util_decorator_js_1, "validate");
70
- __createBinding(exports, engine_util_decorator_js_1, "prefix");
71
- var engine_license_js_1 = require("./engine_license.js");
72
- __createBinding(exports, engine_license_js_1, "hasProLicense");
73
- __createBinding(exports, engine_license_js_1, "hasIndieLicense");
src/engine-components/dist/AudioSource.js DELETED
@@ -1,513 +0,0 @@
1
- "use strict";
2
- var __extends = (this && this.__extends) || (function () {
3
- var extendStatics = function (d, b) {
4
- extendStatics = Object.setPrototypeOf ||
5
- ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
6
- function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
7
- return extendStatics(d, b);
8
- };
9
- return function (d, b) {
10
- extendStatics(d, b);
11
- function __() { this.constructor = d; }
12
- d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
13
- };
14
- })();
15
- var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
16
- var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
17
- if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
18
- else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
19
- return c > 3 && r && Object.defineProperty(target, key, r), r;
20
- };
21
- exports.__esModule = true;
22
- exports.AudioSource = exports.AudioRolloffMode = void 0;
23
- var Component_js_1 = require("./Component.js");
24
- var PositionalAudioHelper_js_1 = require("three/examples/jsm/helpers/PositionalAudioHelper.js");
25
- var AudioListener_js_1 = require("./AudioListener.js");
26
- var utils = require("../engine/engine_utils.js");
27
- var engine_serialization_decorator_js_1 = require("../engine/engine_serialization_decorator.js");
28
- var engine_application_js_1 = require("../engine/engine_application.js");
29
- var three_1 = require("three");
30
- var debug = utils.getParam("debugaudio");
31
- var AudioRolloffMode;
32
- (function (AudioRolloffMode) {
33
- /// <summary>
34
- /// <para>Use this mode when you want a real-world rolloff.</para>
35
- /// </summary>
36
- AudioRolloffMode[AudioRolloffMode["Logarithmic"] = 0] = "Logarithmic";
37
- /// <summary>
38
- /// <para>Use this mode when you want to lower the volume of your sound over the distance.</para>
39
- /// </summary>
40
- AudioRolloffMode[AudioRolloffMode["Linear"] = 1] = "Linear";
41
- /// <summary>
42
- /// <para>Use this when you want to use a custom rolloff.</para>
43
- /// </summary>
44
- AudioRolloffMode[AudioRolloffMode["Custom"] = 2] = "Custom";
45
- })(AudioRolloffMode = exports.AudioRolloffMode || (exports.AudioRolloffMode = {}));
46
- var AudioSource = /** @class */ (function (_super) {
47
- __extends(AudioSource, _super);
48
- function AudioSource() {
49
- var _this = _super !== null && _super.apply(this, arguments) || this;
50
- _this.clip = "";
51
- _this.playOnAwake = false;
52
- _this._spatialBlend = 0;
53
- _this._minDistance = 1;
54
- _this._maxDistance = 100;
55
- _this._volume = 1;
56
- _this.rollOffMode = 0;
57
- _this.playInBackground = true;
58
- _this._loop = false;
59
- _this.sound = null;
60
- _this.helper = null;
61
- _this.wasPlaying = false;
62
- _this.audioLoader = null;
63
- _this.shouldPlay = false;
64
- // set this from audio context time, used to set clip offset when setting "time" property
65
- // there is maybe a better way to set a audio clip current time?!
66
- _this._lastClipStartedLoading = null;
67
- _this._audioElement = null;
68
- _this.onVisibilityChanged = function () {
69
- switch (document.visibilityState) {
70
- case "hidden":
71
- if (_this.playInBackground === false || utils.isMobileDevice()) {
72
- _this.wasPlaying = _this.isPlaying;
73
- if (_this.isPlaying) {
74
- _this.pause();
75
- }
76
- }
77
- break;
78
- case "visible":
79
- if (debug)
80
- console.log("visible", _this.enabled, _this.playOnAwake, !_this.isPlaying, AudioSource._userInteractionRegistered, _this.wasPlaying);
81
- if (_this.enabled && _this.playOnAwake && !_this.isPlaying && AudioSource._userInteractionRegistered && _this.wasPlaying) {
82
- _this.play();
83
- }
84
- break;
85
- }
86
- };
87
- _this.onApplicationMuteChanged = function () {
88
- var _a, _b;
89
- if (_this.context.application.muted)
90
- (_a = _this.sound) === null || _a === void 0 ? void 0 : _a.setVolume(0);
91
- else
92
- (_b = _this.sound) === null || _b === void 0 ? void 0 : _b.setVolume(_this.volume);
93
- };
94
- _this.lerp = function (x, y, a) { return x * (1 - a) + y * a; };
95
- _this.createAudio = function (buffer) {
96
- if (debug)
97
- console.log("audio buffer loaded");
98
- AudioSource.registerWaitForAllowAudio(function () {
99
- if (debug)
100
- console.log("finished loading", buffer);
101
- var sound = _this.Sound;
102
- if (!sound) {
103
- console.warn("Failed getting sound", _this.name);
104
- return;
105
- }
106
- if (sound.isPlaying)
107
- sound.stop();
108
- if (buffer)
109
- sound.setBuffer(buffer);
110
- sound.loop = _this._loop;
111
- if (_this.context.application.muted)
112
- sound.setVolume(0);
113
- else
114
- sound.setVolume(_this.volume);
115
- sound.autoplay = _this.shouldPlay;
116
- // sound.setDistanceModel('linear');
117
- // sound.setRolloffFactor(1);
118
- _this.applySpatialDistanceSettings();
119
- // sound.setDirectionalCone(180, 360, 0.1);
120
- if (sound.isPlaying)
121
- sound.stop();
122
- if (debug)
123
- console.log(_this.name, _this.shouldPlay, AudioSource.userInteractionRegistered, _this);
124
- if (_this.shouldPlay && AudioSource._userInteractionRegistered)
125
- _this.play();
126
- });
127
- };
128
- _this._lastContextTime = 0;
129
- _this._hasEnded = true;
130
- _this._needUpdateSpatialDistanceSettings = false;
131
- return _this;
132
- }
133
- Object.defineProperty(AudioSource, "userInteractionRegistered", {
134
- get: function () {
135
- if (!AudioSource._didCallBeginWaitForUserInteraction) {
136
- AudioSource._didCallBeginWaitForUserInteraction = true;
137
- AudioSource._beginWaitForUserInteraction();
138
- }
139
- return AudioSource._userInteractionRegistered;
140
- },
141
- enumerable: false,
142
- configurable: true
143
- });
144
- AudioSource.registerWaitForAllowAudio = function (cb) {
145
- if (cb !== null) {
146
- if (this._userInteractionRegistered) {
147
- cb();
148
- return;
149
- }
150
- if (this.callbacks.indexOf(cb) === -1)
151
- this.callbacks.push(cb);
152
- if (!AudioSource._didCallBeginWaitForUserInteraction) {
153
- AudioSource._didCallBeginWaitForUserInteraction = true;
154
- AudioSource._beginWaitForUserInteraction();
155
- }
156
- }
157
- };
158
- AudioSource._beginWaitForUserInteraction = function (cb) {
159
- var _this = this;
160
- if (cb === void 0) { cb = null; }
161
- if (this._userInteractionRegistered) {
162
- if (cb)
163
- cb();
164
- return;
165
- }
166
- if (cb !== null)
167
- this.registerWaitForAllowAudio(cb);
168
- var callback = function () {
169
- if (fn == undefined)
170
- return;
171
- if (AudioSource._userInteractionRegistered)
172
- return;
173
- AudioSource._userInteractionRegistered = true;
174
- if (debug)
175
- console.log("🔊 registered interaction, can play audio now");
176
- document.removeEventListener('pointerdown', fn);
177
- document.removeEventListener('click', fn);
178
- document.removeEventListener('dragstart', fn);
179
- document.removeEventListener('touchstart', fn);
180
- for (var _i = 0, _a = _this.callbacks; _i < _a.length; _i++) {
181
- var cb_1 = _a[_i];
182
- cb_1();
183
- }
184
- _this.callbacks.length = 0;
185
- };
186
- var fn = callback.bind(this);
187
- document.addEventListener('pointerdown', fn);
188
- document.addEventListener('click', fn);
189
- document.addEventListener('dragstart', fn);
190
- document.addEventListener('touchstart', fn);
191
- };
192
- Object.defineProperty(AudioSource.prototype, "loop", {
193
- get: function () {
194
- if (this.sound)
195
- this._loop = this.sound.getLoop();
196
- return this._loop;
197
- },
198
- set: function (val) {
199
- this._loop = val;
200
- if (this.sound)
201
- this.sound.setLoop(val);
202
- },
203
- enumerable: false,
204
- configurable: true
205
- });
206
- Object.defineProperty(AudioSource.prototype, "spatialBlend", {
207
- /** 0 = 2D, 1 = 3D */
208
- get: function () {
209
- return this._spatialBlend;
210
- },
211
- set: function (val) {
212
- if (val === this._spatialBlend)
213
- return;
214
- this._spatialBlend = val;
215
- this._needUpdateSpatialDistanceSettings = true;
216
- },
217
- enumerable: false,
218
- configurable: true
219
- });
220
- Object.defineProperty(AudioSource.prototype, "minDistance", {
221
- get: function () {
222
- return this._minDistance;
223
- },
224
- set: function (val) {
225
- if (this._minDistance === val)
226
- return;
227
- this._minDistance = val;
228
- this._needUpdateSpatialDistanceSettings = true;
229
- },
230
- enumerable: false,
231
- configurable: true
232
- });
233
- Object.defineProperty(AudioSource.prototype, "maxDistance", {
234
- get: function () {
235
- return this._maxDistance;
236
- },
237
- set: function (val) {
238
- if (this._maxDistance === val)
239
- return;
240
- this._maxDistance = val;
241
- this._needUpdateSpatialDistanceSettings = true;
242
- },
243
- enumerable: false,
244
- configurable: true
245
- });
246
- Object.defineProperty(AudioSource.prototype, "volume", {
247
- get: function () { return this._volume; },
248
- set: function (val) {
249
- this._volume = val;
250
- if (this.sound && !this.context.application.muted) {
251
- if (debug)
252
- console.log(this.name, "audio set volume", val);
253
- this.sound.setVolume(val);
254
- }
255
- },
256
- enumerable: false,
257
- configurable: true
258
- });
259
- Object.defineProperty(AudioSource.prototype, "Sound", {
260
- get: function () {
261
- var _a;
262
- if (!this.sound && AudioSource._userInteractionRegistered) {
263
- var listener = (_a = Component_js_1.GameObject.getComponent(this.context.mainCamera, AudioListener_js_1.AudioListener)) !== null && _a !== void 0 ? _a : Component_js_1.GameObject.findObjectOfType(AudioListener_js_1.AudioListener, this.context);
264
- if (!listener && this.context.mainCamera)
265
- listener = Component_js_1.GameObject.addNewComponent(this.context.mainCamera, AudioListener_js_1.AudioListener);
266
- if (listener === null || listener === void 0 ? void 0 : listener.listener) {
267
- this.sound = new three_1.PositionalAudio(listener.listener);
268
- this.gameObject.add(this.sound);
269
- }
270
- else if (debug)
271
- console.warn("No audio listener found in scene - can not play audio");
272
- }
273
- return this.sound;
274
- },
275
- enumerable: false,
276
- configurable: true
277
- });
278
- Object.defineProperty(AudioSource.prototype, "ShouldPlay", {
279
- get: function () { return this.shouldPlay; },
280
- enumerable: false,
281
- configurable: true
282
- });
283
- AudioSource.prototype.awake = function () {
284
- this.audioLoader = new three_1.AudioLoader();
285
- if (this.playOnAwake)
286
- this.shouldPlay = true;
287
- };
288
- AudioSource.prototype.onEnable = function () {
289
- var _this = this;
290
- if (!AudioSource._userInteractionRegistered) {
291
- AudioSource._beginWaitForUserInteraction(function () {
292
- if (_this.enabled && !_this.destroyed && _this.shouldPlay)
293
- _this.onNewClip(_this.clip);
294
- });
295
- }
296
- else if (this.playOnAwake && this.context.application.isVisible) {
297
- this.play();
298
- }
299
- globalThis.addEventListener('visibilitychange', this.onVisibilityChanged);
300
- this.context.application.addEventListener(engine_application_js_1.ApplicationEvents.MuteChanged, this.onApplicationMuteChanged);
301
- };
302
- AudioSource.prototype.onDisable = function () {
303
- globalThis.removeEventListener('visibilitychange', this.onVisibilityChanged);
304
- this.context.application.removeEventListener(engine_application_js_1.ApplicationEvents.MuteChanged, this.onApplicationMuteChanged);
305
- this.stop();
306
- };
307
- AudioSource.prototype.applySpatialDistanceSettings = function () {
308
- var sound = this.sound;
309
- if (!sound)
310
- return;
311
- this._needUpdateSpatialDistanceSettings = false;
312
- var dist = this.lerp(10 * this._maxDistance / Math.max(0.0001, this.spatialBlend), this._minDistance, this.spatialBlend);
313
- if (debug)
314
- console.log(this.name, this._minDistance, this._maxDistance, this.spatialBlend, "Ref distance=" + dist);
315
- sound.setRefDistance(dist);
316
- sound.setMaxDistance(Math.max(0.01, this._maxDistance));
317
- // https://developer.mozilla.org/en-US/docs/Web/API/PannerNode/distanceModel
318
- switch (this.rollOffMode) {
319
- case AudioRolloffMode.Logarithmic:
320
- sound.setDistanceModel('exponential');
321
- break;
322
- case AudioRolloffMode.Linear:
323
- sound.setDistanceModel('linear');
324
- break;
325
- case AudioRolloffMode.Custom:
326
- break;
327
- }
328
- if (this.spatialBlend > 0) {
329
- if (debug && !this.helper) {
330
- this.helper = new PositionalAudioHelper_js_1.PositionalAudioHelper(sound, sound.getRefDistance());
331
- sound.add(this.helper);
332
- }
333
- }
334
- else if (this.helper && this.helper.parent) {
335
- this.helper.removeFromParent();
336
- }
337
- };
338
- AudioSource.prototype.onNewClip = function (clip) {
339
- if (clip)
340
- this.clip = clip;
341
- if (typeof clip === "string") {
342
- if (debug)
343
- console.log(clip);
344
- if (clip.endsWith(".mp3") || clip.endsWith(".wav")) {
345
- if (!this.audioLoader)
346
- this.audioLoader = new three_1.AudioLoader();
347
- this.shouldPlay = true;
348
- if (this._lastClipStartedLoading === clip) {
349
- if (debug)
350
- console.log("Is currently loading:", this._lastClipStartedLoading, this);
351
- return;
352
- }
353
- this._lastClipStartedLoading = clip;
354
- if (debug)
355
- console.log("load audio", clip);
356
- this.audioLoader.load(clip, this.createAudio, function () { }, console.error);
357
- }
358
- else
359
- console.warn("Unsupported audio clip type", clip);
360
- }
361
- else {
362
- this.shouldPlay = true;
363
- this.createAudio();
364
- }
365
- };
366
- /** Play a mediastream */
367
- AudioSource.prototype.play = function (clip) {
368
- var _a, _b;
369
- if (clip === void 0) { clip = undefined; }
370
- // We only support strings and media stream
371
- // TODO: maybe we should return here if an invalid value is passed in
372
- if (clip !== undefined && typeof clip !== "string" && !(clip instanceof MediaStream)) {
373
- console.warn("Called play on AudioSource with unknown argument type", clip);
374
- clip = undefined;
375
- }
376
- // Check if we need to call load first
377
- var needsLoading = !this.sound || (clip && clip !== this.clip);
378
- if (typeof clip === "string" && !this.audioLoader)
379
- needsLoading = true;
380
- if (clip instanceof MediaStream || typeof clip === "string")
381
- this.clip = clip;
382
- if (needsLoading) {
383
- this.shouldPlay = true;
384
- this.onNewClip(clip);
385
- return;
386
- }
387
- this.shouldPlay = true;
388
- this._hasEnded = false;
389
- if (debug)
390
- console.log("play", (_a = this.sound) === null || _a === void 0 ? void 0 : _a.getVolume(), this.sound);
391
- if (this.sound && !this.sound.isPlaying) {
392
- var muted = this.context.application.muted;
393
- if (muted)
394
- this.sound.setVolume(0);
395
- if (this.clip instanceof MediaStream) {
396
- // We have to set the audio element source to the mediastream as well
397
- // otherwise it will not play for some reason...
398
- this.sound.setMediaStreamSource(this.clip);
399
- if (!this._audioElement) {
400
- this._audioElement = document.createElement('audio');
401
- this._audioElement.style.display = "none";
402
- }
403
- if (!this._audioElement.parentNode)
404
- (_b = this.context.domElement.shadowRoot) === null || _b === void 0 ? void 0 : _b.append(this._audioElement);
405
- this._audioElement.srcObject = this.clip;
406
- this._audioElement.autoplay = false;
407
- }
408
- else {
409
- if (this._audioElement)
410
- this._audioElement.remove();
411
- this.sound.play(muted ? .1 : 0);
412
- }
413
- }
414
- };
415
- AudioSource.prototype.pause = function () {
416
- var _a, _b;
417
- if (debug)
418
- console.log("Pause", this);
419
- this._hasEnded = true;
420
- this.shouldPlay = false;
421
- if (this.sound && this.sound.isPlaying && this.sound.source) {
422
- this._lastContextTime = (_a = this.sound) === null || _a === void 0 ? void 0 : _a.context.currentTime;
423
- this.sound.pause();
424
- }
425
- (_b = this._audioElement) === null || _b === void 0 ? void 0 : _b.remove();
426
- };
427
- AudioSource.prototype.stop = function () {
428
- var _a, _b;
429
- if (debug)
430
- console.log("Pause", this);
431
- this._hasEnded = true;
432
- this.shouldPlay = false;
433
- if (this.sound && this.sound.source) {
434
- this._lastContextTime = (_a = this.sound) === null || _a === void 0 ? void 0 : _a.context.currentTime;
435
- if (debug)
436
- console.log(this._lastContextTime);
437
- this.sound.stop();
438
- }
439
- (_b = this._audioElement) === null || _b === void 0 ? void 0 : _b.remove();
440
- };
441
- Object.defineProperty(AudioSource.prototype, "isPlaying", {
442
- get: function () { var _a, _b; return (_b = (_a = this.sound) === null || _a === void 0 ? void 0 : _a.isPlaying) !== null && _b !== void 0 ? _b : false; },
443
- set: function (_) { },
444
- enumerable: false,
445
- configurable: true
446
- });
447
- Object.defineProperty(AudioSource.prototype, "time", {
448
- get: function () { var _a, _b; return ((_a = this.sound) === null || _a === void 0 ? void 0 : _a.source) ? (((_b = this.sound.source) === null || _b === void 0 ? void 0 : _b.context.currentTime) - this._lastContextTime + this.sound.offset) : 0; },
449
- set: function (val) {
450
- if (this.sound) {
451
- if (val === this.sound.offset)
452
- return;
453
- var wasPlaying = this.isPlaying;
454
- this.stop();
455
- this.sound.offset = val;
456
- if (wasPlaying)
457
- this.play();
458
- }
459
- },
460
- enumerable: false,
461
- configurable: true
462
- });
463
- AudioSource.prototype.update = function () {
464
- if (this.helper) {
465
- if (this.isPlaying)
466
- this.helper.update();
467
- this.helper.visible = this.isPlaying;
468
- }
469
- if (this._needUpdateSpatialDistanceSettings) {
470
- this.applySpatialDistanceSettings();
471
- }
472
- if (this.sound && !this.sound.isPlaying && this.shouldPlay && !this._hasEnded) {
473
- this._hasEnded = true;
474
- if (debug)
475
- console.log("Audio clip ended", this.clip);
476
- this.sound.dispatchEvent({ type: 'ended', target: this });
477
- }
478
- // this.gameObject.position.x = Math.sin(time.time) * 2;
479
- // this.gameObject.position.z = Math.cos(time.time * .5) * 2;
480
- };
481
- AudioSource._didCallBeginWaitForUserInteraction = false;
482
- AudioSource.callbacks = [];
483
- AudioSource._userInteractionRegistered = false;
484
- __decorate([
485
- engine_serialization_decorator_js_1.serializable(URL)
486
- ], AudioSource.prototype, "clip");
487
- __decorate([
488
- engine_serialization_decorator_js_1.serializable()
489
- ], AudioSource.prototype, "playOnAwake");
490
- __decorate([
491
- engine_serialization_decorator_js_1.serializable()
492
- ], AudioSource.prototype, "loop");
493
- __decorate([
494
- engine_serialization_decorator_js_1.serializable()
495
- ], AudioSource.prototype, "spatialBlend");
496
- __decorate([
497
- engine_serialization_decorator_js_1.serializable()
498
- ], AudioSource.prototype, "minDistance");
499
- __decorate([
500
- engine_serialization_decorator_js_1.serializable()
501
- ], AudioSource.prototype, "maxDistance");
502
- __decorate([
503
- engine_serialization_decorator_js_1.serializable()
504
- ], AudioSource.prototype, "volume");
505
- __decorate([
506
- engine_serialization_decorator_js_1.serializable()
507
- ], AudioSource.prototype, "rollOffMode");
508
- __decorate([
509
- engine_serialization_decorator_js_1.serializable()
510
- ], AudioSource.prototype, "playInBackground");
511
- return AudioSource;
512
- }(Component_js_1.Behaviour));
513
- exports.AudioSource = AudioSource;
src/engine/dist/engine_networking_streams.js DELETED
@@ -1,474 +0,0 @@
1
- "use strict";
2
- var __extends = (this && this.__extends) || (function () {
3
- var extendStatics = function (d, b) {
4
- extendStatics = Object.setPrototypeOf ||
5
- ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
6
- function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
7
- return extendStatics(d, b);
8
- };
9
- return function (d, b) {
10
- extendStatics(d, b);
11
- function __() { this.constructor = d; }
12
- d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
13
- };
14
- })();
15
- exports.__esModule = true;
16
- exports.disposeStream = exports.NetworkedStreams = exports.PeerHandle = exports.ReceiveStreamEvent = exports.CallEndedEvent = exports.PeerEvent = void 0;
17
- var engine_networking_js_1 = require("../engine/engine_networking.js");
18
- var engine_networking_peer_js_1 = require("../engine/engine_networking_peer.js");
19
- var three_1 = require("three");
20
- var engine_utils_js_1 = require("./engine_utils.js");
21
- var debug = engine_utils_js_1.getParam("debugnetworkingstreams");
22
- var PeerEvent;
23
- (function (PeerEvent) {
24
- PeerEvent["Connected"] = "peer-user-connected";
25
- PeerEvent["ReceiveStream"] = "receive-stream";
26
- PeerEvent["CallEnded"] = "call-ended";
27
- PeerEvent["Disconnected"] = "peer-user-disconnected";
28
- PeerEvent["UserJoined"] = "user-joined";
29
- })(PeerEvent = exports.PeerEvent || (exports.PeerEvent = {}));
30
- var CallEndedEvent = /** @class */ (function () {
31
- function CallEndedEvent(userId, direction) {
32
- this.type = PeerEvent.CallEnded;
33
- this.userId = userId;
34
- this.direction = direction;
35
- }
36
- return CallEndedEvent;
37
- }());
38
- exports.CallEndedEvent = CallEndedEvent;
39
- var ReceiveStreamEvent = /** @class */ (function () {
40
- function ReceiveStreamEvent(stream, target) {
41
- this.type = PeerEvent.ReceiveStream;
42
- this.stream = stream;
43
- this.target = target;
44
- }
45
- return ReceiveStreamEvent;
46
- }());
47
- exports.ReceiveStreamEvent = ReceiveStreamEvent;
48
- var PeerUserConnectedModel = /** @class */ (function () {
49
- function PeerUserConnectedModel(handle, peerId) {
50
- // internal so server doesnt save it to persistent storage
51
- this.dontSave = true;
52
- this.guid = handle.id;
53
- this.peerId = peerId;
54
- }
55
- return PeerUserConnectedModel;
56
- }());
57
- var CallDirection;
58
- (function (CallDirection) {
59
- CallDirection["Incoming"] = "incoming";
60
- CallDirection["Outgoing"] = "outgoing";
61
- })(CallDirection || (CallDirection = {}));
62
- var CallHandle = /** @class */ (function (_super) {
63
- __extends(CallHandle, _super);
64
- function CallHandle(userId, call, direction) {
65
- var _this = _super.call(this) || this;
66
- _this._stream = null;
67
- _this._isDisposed = false;
68
- _this.userId = userId;
69
- _this.call = call;
70
- _this.direction = direction;
71
- _this._stream = null;
72
- call.on("stream", function (stream) {
73
- if (debug)
74
- console.log("Receive video", stream.getAudioTracks(), stream.getVideoTracks());
75
- _this._stream = stream;
76
- if (direction === CallDirection.Incoming) {
77
- var args = new ReceiveStreamEvent(stream, _this);
78
- _this.dispatchEvent(args);
79
- }
80
- });
81
- call.on("close", function () {
82
- _this.dispatchEvent(new CallEndedEvent(userId, direction));
83
- });
84
- return _this;
85
- }
86
- Object.defineProperty(CallHandle.prototype, "stream", {
87
- get: function () { return this._stream; },
88
- enumerable: false,
89
- configurable: true
90
- });
91
- ;
92
- CallHandle.prototype.close = function () {
93
- if (this._isDisposed)
94
- return;
95
- this._isDisposed = true;
96
- this.call.close();
97
- disposeStream(this._stream);
98
- };
99
- Object.defineProperty(CallHandle.prototype, "isOpen", {
100
- get: function () {
101
- var _a;
102
- return ((_a = this.call.peerConnection) === null || _a === void 0 ? void 0 : _a.connectionState) === "connected"; // && this._stream?.active;
103
- },
104
- enumerable: false,
105
- configurable: true
106
- });
107
- Object.defineProperty(CallHandle.prototype, "isOpening", {
108
- get: function () {
109
- var _a;
110
- return ((_a = this.call.peerConnection) === null || _a === void 0 ? void 0 : _a.connectionState) === "connecting";
111
- },
112
- enumerable: false,
113
- configurable: true
114
- });
115
- Object.defineProperty(CallHandle.prototype, "isClosed", {
116
- get: function () {
117
- return !this.isOpen;
118
- },
119
- enumerable: false,
120
- configurable: true
121
- });
122
- return CallHandle;
123
- }(three_1.EventDispatcher));
124
- var PeerHandle = /** @class */ (function (_super) {
125
- __extends(PeerHandle, _super);
126
- function PeerHandle(context, id) {
127
- var _this = _super.call(this) || this;
128
- _this._incomingCalls = [];
129
- _this._outgoingCalls = [];
130
- _this._enabled = false;
131
- _this._enabledPeer = false;
132
- _this.onConnectRoomFn = _this.onConnectRoom.bind(_this);
133
- // private onUserJoinedOrLeftRoomFn: Function = this.onUserJoinedOrLeftRoom.bind(this);
134
- _this.onPeerConnectFn = _this.onPeerConnect.bind(_this);
135
- _this.onPeerReceiveCallFn = _this.onPeerReceivingCall.bind(_this);
136
- _this.context = context;
137
- _this.id = id;
138
- _this.setupPeer();
139
- navigator["getUserMedia"] = (navigator["getUserMedia"] || navigator["webkitGetUserMedia"] ||
140
- navigator["mozGetUserMedia"] || navigator["msGetUserMedia"]);
141
- return _this;
142
- }
143
- PeerHandle.getOrCreate = function (context, guid) {
144
- // if (id === undefined) {
145
- // // randomId
146
- // id = Math.random().toFixed(5);
147
- // }
148
- if (PeerHandle.instances.has(guid))
149
- return PeerHandle.instances.get(guid);
150
- var peer = new PeerHandle(context, guid);
151
- PeerHandle.instances.set(guid, peer);
152
- return peer;
153
- };
154
- PeerHandle.prototype.getMyPeerId = function () {
155
- if (this.context.connection.connectionId)
156
- return this.getPeerIdFromUserId(this.context.connection.connectionId);
157
- return undefined;
158
- };
159
- PeerHandle.prototype.getPeerIdFromUserId = function (userConnectionId) {
160
- // we build the peer id ourselves so we dont need to wait for peer to report it
161
- return this.id + "-" + userConnectionId;
162
- };
163
- PeerHandle.prototype.getUserIdFromPeerId = function (peerId) {
164
- return peerId.substring(this.id.length + 1);
165
- };
166
- PeerHandle.prototype.makeCall = function (peerId, stream) {
167
- var _a;
168
- var opts = { metadata: { userId: this.context.connection.connectionId } };
169
- var call = (_a = this._peer) === null || _a === void 0 ? void 0 : _a.call(peerId, stream, opts);
170
- if (call)
171
- return this.registerCall(call, CallDirection.Outgoing);
172
- return undefined;
173
- };
174
- Object.defineProperty(PeerHandle.prototype, "peer", {
175
- get: function () { return this._peer; },
176
- enumerable: false,
177
- configurable: true
178
- });
179
- // private _connectionPeerIdMap : Map<string, string> = new Map();
180
- PeerHandle.prototype.enable = function () {
181
- if (this._enabled)
182
- return;
183
- this._enabled = true;
184
- this.context.connection.beginListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onConnectRoomFn);
185
- // this.context.connection.beginListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
186
- // this.context.connection.beginListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
187
- this.subscribePeerEvents();
188
- };
189
- PeerHandle.prototype.disable = function () {
190
- if (!this._enabled)
191
- return;
192
- this._enabled = false;
193
- this.context.connection.stopListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onConnectRoomFn);
194
- // this.context.connection.stopListen(RoomEvents.UserJoinedRoom, this.onUserJoinedOrLeftRoomFn);
195
- // this.context.connection.stopListen(RoomEvents.UserLeftRoom, this.onUserJoinedOrLeftRoomFn);
196
- this.unsubscribePeerEvents();
197
- };
198
- PeerHandle.prototype.onConnectRoom = function () {
199
- this.setupPeer();
200
- };
201
- ;
202
- // private onUserJoinedOrLeftRoom(_: UserJoinedOrLeftRoomModel): void {
203
- // };
204
- PeerHandle.prototype.setupPeer = function () {
205
- if (!this.context.connection.connectionId)
206
- return;
207
- if (this._enabledPeer)
208
- return;
209
- this._enabledPeer = true;
210
- if (!this._peer) {
211
- var peerId = this.getMyPeerId();
212
- if (peerId)
213
- this._peer = engine_networking_peer_js_1.getPeerjsInstance(peerId);
214
- else
215
- console.error("Failed to setup peerjs because we dont have a connection id", this.context.connection.connectionId);
216
- }
217
- if (this._enabled)
218
- this.subscribePeerEvents();
219
- };
220
- PeerHandle.prototype.subscribePeerEvents = function () {
221
- if (!this._peer)
222
- return;
223
- this._peer.on("open", this.onPeerConnectFn);
224
- this._peer.on("call", this.onPeerReceiveCallFn);
225
- // this.context.connection.beginListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
226
- // TODO: make connection to all current active calls even if the user is not anymore in the needle room
227
- };
228
- PeerHandle.prototype.unsubscribePeerEvents = function () {
229
- if (!this._peer)
230
- return;
231
- this._peer.off("open", this.onPeerConnectFn);
232
- this._peer.off("call", this.onPeerReceiveCallFn);
233
- // this.context.connection.stopListen(PeerEvent.Connected, this.onRemotePeerConnect.bind(this));
234
- };
235
- PeerHandle.prototype.onPeerConnect = function (id) {
236
- if (debug)
237
- console.log("Peer connected as", id);
238
- this.context.connection.send(PeerEvent.Connected, new PeerUserConnectedModel(this, id));
239
- };
240
- PeerHandle.prototype.onPeerReceivingCall = function (call) {
241
- call.answer();
242
- this.registerCall(call, CallDirection.Incoming);
243
- };
244
- PeerHandle.prototype.registerCall = function (call, direction) {
245
- var _this = this;
246
- var meta = call.metadata;
247
- if (!meta || !meta.userId) {
248
- console.error("Missing call metadata", call);
249
- }
250
- var userId = meta.userId;
251
- if (direction === CallDirection.Incoming && debug)
252
- console.log("Receive call from", call.metadata);
253
- else if (debug)
254
- console.log("Make call to", call.metadata);
255
- var arr = direction === CallDirection.Incoming ? this._incomingCalls : this._outgoingCalls;
256
- var handle = new CallHandle(userId, call, direction);
257
- arr.push(handle);
258
- call.on("error", function (err) {
259
- console.error("Call error", err);
260
- });
261
- call.on("close", function () {
262
- if (debug)
263
- console.log("Call ended", call.metadata);
264
- call.close();
265
- var index = arr.indexOf(handle);
266
- if (index !== -1)
267
- arr.splice(index, 1);
268
- });
269
- handle.addEventListener(PeerEvent.CallEnded, function (e) {
270
- _this.dispatchEvent(e);
271
- });
272
- if (direction === CallDirection.Incoming) {
273
- handle.addEventListener(PeerEvent.ReceiveStream, function (e) {
274
- _this.dispatchEvent(e);
275
- });
276
- call.on("stream", function () {
277
- // workaround for https://github.com/peers/peerjs/issues/636
278
- var intervalCounter = 0;
279
- var closeInterval = setInterval(function () {
280
- var isFirstInterval = intervalCounter === 0;
281
- if (!handle.isOpen && isFirstInterval) {
282
- intervalCounter += 1;
283
- clearInterval(closeInterval);
284
- handle.close();
285
- }
286
- }, 2000);
287
- });
288
- }
289
- return handle;
290
- };
291
- PeerHandle.instances = new Map();
292
- return PeerHandle;
293
- }(three_1.EventDispatcher));
294
- exports.PeerHandle = PeerHandle;
295
- // type UserVideoCall = {
296
- // call: Peer.MediaConnection;
297
- // stream: MediaStream;
298
- // userId: string;
299
- // }
300
- // type IncomingStreamArgs = {
301
- // stream: MediaStream;
302
- // userId: string;
303
- // }
304
- var NetworkedStreams = /** @class */ (function (_super) {
305
- __extends(NetworkedStreams, _super);
306
- function NetworkedStreams(context, peer) {
307
- var _this = _super.call(this) || this;
308
- // private _receiveVideoStreamListeners: Array<(info: IncomingStreamArgs) => void> = [];
309
- _this._sendingStreams = new Map();
310
- // private onUserJoinedPeer = (evt) => {
311
- // if (!this.context.connection.isConnected && evt.userId) {
312
- // this.startCallWithUserIfNotAlready(evt.userId);
313
- // }
314
- // }
315
- // When either we ourselves OR someone else is joining the room we want to make sure to re-establish all calls
316
- // and if the user that joined is not yet receiving our video stream we want to start a stream with them
317
- // https://github.com/needle-tools/needle-tiny/issues/697#issuecomment-1510425539
318
- _this.onJoinedRoom = function (evt) {
319
- if (debug)
320
- console.log(evt.userId + " joined room and I'm currently sending " + _this._sendingStreams.size + " streams");
321
- if (_this._sendingStreams.size > 0)
322
- _this.updateSendingCalls();
323
- };
324
- _this.onReceiveStream = function (evt) {
325
- if (debug)
326
- console.log("RECEIVE VIDEO", evt);
327
- _this.dispatchEvent({ type: PeerEvent.ReceiveStream, target: _this, stream: evt.stream, userId: evt.userId });
328
- };
329
- _this.onCallEnded = function (evt) {
330
- _this.dispatchEvent(evt);
331
- };
332
- _this.onUserConnected = function (user) {
333
- // console.log(this.peer.id, user.guid)
334
- if (_this.peer.id === user.guid) {
335
- if (debug)
336
- console.log("USER CONNECTED", user.guid, user);
337
- var stream = _this._sendingStreams.keys().next().value;
338
- _this.peer.makeCall(user.peerId, stream);
339
- }
340
- };
341
- _this.context = context;
342
- _this.peer = peer;
343
- return _this;
344
- }
345
- NetworkedStreams.create = function (comp) {
346
- var peer = PeerHandle.getOrCreate(comp.context, comp.context.connection.connectionId);
347
- return new NetworkedStreams(comp.context, peer);
348
- };
349
- NetworkedStreams.prototype.startSendingStream = function (stream) {
350
- if (!this._sendingStreams.has(stream)) {
351
- this._sendingStreams.set(stream, []);
352
- this.updateSendingCalls();
353
- }
354
- ;
355
- };
356
- NetworkedStreams.prototype.stopSendingStream = function (_steam) {
357
- if (_steam) {
358
- var calls = this._sendingStreams.get(_steam);
359
- if (calls) {
360
- if (debug)
361
- console.log("Closing calls", calls);
362
- for (var _i = 0, calls_1 = calls; _i < calls_1.length; _i++) {
363
- var call = calls_1[_i];
364
- call.close();
365
- }
366
- }
367
- this._sendingStreams["delete"](_steam);
368
- if (calls && debug)
369
- console.log("Currently sending", this._sendingStreams);
370
- }
371
- };
372
- // private onConnectRoomFn: Function = this.onConnectRoom.bind(this);
373
- // private onUserConnectedFn: Function = this.onUserConnected.bind(this);
374
- // private onUserLeftFn: Function = this.onUserLeft.bind(this);
375
- NetworkedStreams.prototype.enable = function () {
376
- this.peer.enable();
377
- this.peer.addEventListener(PeerEvent.ReceiveStream, this.onReceiveStream);
378
- //@ts-ignore
379
- this.peer.addEventListener(PeerEvent.CallEnded, this.onCallEnded);
380
- // this.peer.addEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
381
- this.context.connection.beginListen(PeerEvent.Connected, this.onUserConnected);
382
- this.context.connection.beginListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onJoinedRoom);
383
- this.context.connection.beginListen(engine_networking_js_1.RoomEvents.UserJoinedRoom, this.onJoinedRoom);
384
- this.context.connection.beginListen(engine_networking_js_1.RoomEvents.UserLeftRoom, this.onUserLeft);
385
- };
386
- NetworkedStreams.prototype.disable = function () {
387
- this.peer.disable();
388
- this.peer.removeEventListener(PeerEvent.ReceiveStream, this.onReceiveStream);
389
- //@ts-ignore
390
- this.peer.removeEventListener(PeerEvent.CallEnded, this.onCallEnded);
391
- // this.peer.removeEventListener(PeerEvent.UserJoined, this.onUserJoinedPeer);
392
- this.context.connection.stopListen(PeerEvent.Connected, this.onUserConnected);
393
- this.context.connection.stopListen(engine_networking_js_1.RoomEvents.JoinedRoom, this.onJoinedRoom);
394
- this.context.connection.stopListen(engine_networking_js_1.RoomEvents.UserJoinedRoom, this.onJoinedRoom);
395
- this.context.connection.stopListen(engine_networking_js_1.RoomEvents.UserLeftRoom, this.onUserLeft);
396
- };
397
- NetworkedStreams.prototype.onUserLeft = function (_) {
398
- this.stopCallsToUsersThatAreNotInTheRoomAnymore();
399
- };
400
- NetworkedStreams.prototype.updateSendingCalls = function () {
401
- var _a;
402
- var startedNewCall = false;
403
- var localUserId = this.context.connection.connectionId;
404
- for (var _i = 0, _b = this._sendingStreams.keys(); _i < _b.length; _i++) {
405
- var stream = _b[_i];
406
- var calls = this._sendingStreams.get(stream) || [];
407
- var _loop_1 = function (userId) {
408
- if (userId === localUserId)
409
- return "continue";
410
- var existing = calls.find(function (c) { return c.userId === userId; });
411
- if (!existing || ((_a = existing.stream) === null || _a === void 0 ? void 0 : _a.active) === false) {
412
- if (debug)
413
- console.log("Starting call to", userId, localUserId);
414
- var handle = this_1.peer.makeCall(this_1.peer.getPeerIdFromUserId(userId), stream);
415
- if (handle) {
416
- startedNewCall = true;
417
- calls.push(handle);
418
- }
419
- }
420
- };
421
- var this_1 = this;
422
- for (var _c = 0, _d = this.context.connection.usersInRoom(); _c < _d.length; _c++) {
423
- var userId = _d[_c];
424
- _loop_1(userId);
425
- }
426
- this._sendingStreams.set(stream, calls);
427
- }
428
- this.stopCallsToUsersThatAreNotInTheRoomAnymore();
429
- if (startedNewCall && debug) {
430
- console.log("Currently sending", this._sendingStreams);
431
- }
432
- };
433
- // private startCallWithUserIfNotAlready(userId: string) {
434
- // for (const stream of this._sendingVideoStreams.keys()) {
435
- // const calls = this._sendingVideoStreams.get(stream) || [];
436
- // const existing = calls.find(c => c.userId === userId);
437
- // if (!existing || existing.stream?.active === false) {
438
- // if (debug) console.log("Starting call to", userId)
439
- // const handle = this.peer.makeCall(this.peer.getPeerIdFromUserId(userId), stream);
440
- // if (handle) {
441
- // calls.push(handle);
442
- // return true;
443
- // }
444
- // }
445
- // }
446
- // return false;
447
- // }
448
- NetworkedStreams.prototype.stopCallsToUsersThatAreNotInTheRoomAnymore = function () {
449
- for (var _i = 0, _a = this._sendingStreams.keys(); _i < _a.length; _i++) {
450
- var stream = _a[_i];
451
- var calls = this._sendingStreams.get(stream);
452
- if (!calls)
453
- continue;
454
- for (var i = calls.length - 1; i >= 0; i--) {
455
- var call = calls[i];
456
- if (!this.context.connection.userIsInRoom(call.userId)) {
457
- call.close();
458
- calls.splice(i, 1);
459
- }
460
- }
461
- }
462
- };
463
- return NetworkedStreams;
464
- }(three_1.EventDispatcher));
465
- exports.NetworkedStreams = NetworkedStreams;
466
- function disposeStream(str) {
467
- if (!str)
468
- return;
469
- for (var _i = 0, _a = str.getTracks(); _i < _a.length; _i++) {
470
- var cap = _a[_i];
471
- cap.stop();
472
- }
473
- }
474
- exports.disposeStream = disposeStream;
src/engine-schemes/dist/schemes.js DELETED
@@ -1,25 +0,0 @@
1
- "use strict";
2
- exports.__esModule = true;
3
- exports.tryGetGuid = exports.tryCastBinary = exports.registerBinaryType = exports.binaryIdentifierCasts = void 0;
4
- // registry
5
- exports.binaryIdentifierCasts = {};
6
- function registerBinaryType(identifier, cast) {
7
- exports.binaryIdentifierCasts[identifier] = cast;
8
- }
9
- exports.registerBinaryType = registerBinaryType;
10
- // called by networking on receiving a new binary blob
11
- // it's just a little helper method so listeners dont have to cast to types every time
12
- function tryCastBinary(bin) {
13
- var id = bin.getBufferIdentifier();
14
- var cast = exports.binaryIdentifierCasts[id];
15
- var mod = cast(bin);
16
- return mod;
17
- }
18
- exports.tryCastBinary = tryCastBinary;
19
- function tryGetGuid(obj) {
20
- if (typeof obj["guid"] === "function") {
21
- return obj.guid();
22
- }
23
- return null;
24
- }
25
- exports.tryGetGuid = tryGetGuid;
src/engine-components/dist/ScreenCapture.js DELETED
@@ -1,490 +0,0 @@
1
- "use strict";
2
- var __extends = (this && this.__extends) || (function () {
3
- var extendStatics = function (d, b) {
4
- extendStatics = Object.setPrototypeOf ||
5
- ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
6
- function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
7
- return extendStatics(d, b);
8
- };
9
- return function (d, b) {
10
- extendStatics(d, b);
11
- function __() { this.constructor = d; }
12
- d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
13
- };
14
- })();
15
- var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
16
- var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
17
- if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
18
- else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
19
- return c > 3 && r && Object.defineProperty(target, key, r), r;
20
- };
21
- var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
22
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
23
- return new (P || (P = Promise))(function (resolve, reject) {
24
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
25
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
26
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
27
- step((generator = generator.apply(thisArg, _arguments || [])).next());
28
- });
29
- };
30
- var __generator = (this && this.__generator) || function (thisArg, body) {
31
- var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
32
- return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
33
- function verb(n) { return function (v) { return step([n, v]); }; }
34
- function step(op) {
35
- if (f) throw new TypeError("Generator is already executing.");
36
- while (_) try {
37
- if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
38
- if (y = 0, t) op = [op[0] & 2, t.value];
39
- switch (op[0]) {
40
- case 0: case 1: t = op; break;
41
- case 4: _.label++; return { value: op[1], done: false };
42
- case 5: _.label++; y = op[1]; op = [0]; continue;
43
- case 7: op = _.ops.pop(); _.trys.pop(); continue;
44
- default:
45
- if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
46
- if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
47
- if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
48
- if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
49
- if (t[2]) _.ops.pop();
50
- _.trys.pop(); continue;
51
- }
52
- op = body.call(thisArg, _);
53
- } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
54
- if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
55
- }
56
- };
57
- exports.__esModule = true;
58
- exports.ScreenCapture = exports.ScreenCaptureMode = exports.ScreenCaptureDevice = void 0;
59
- var Component_js_1 = require("./Component.js");
60
- var VideoPlayer_js_1 = require("./VideoPlayer.js");
61
- var engine_serialization_js_1 = require("../engine/engine_serialization.js");
62
- var AudioSource_js_1 = require("./AudioSource.js");
63
- var engine_utils_js_1 = require("../engine/engine_utils.js");
64
- var index_js_1 = require("../engine/debug/index.js");
65
- var engine_networking_streams_js_1 = require("../engine/engine_networking_streams.js");
66
- var debug = engine_utils_js_1.getParam("debugscreensharing");
67
- var ScreenCaptureDevice;
68
- (function (ScreenCaptureDevice) {
69
- ScreenCaptureDevice[ScreenCaptureDevice["Screen"] = 0] = "Screen";
70
- ScreenCaptureDevice[ScreenCaptureDevice["Camera"] = 1] = "Camera";
71
- /** Please note that canvas streaming might not work reliably on chrome: https://bugs.chromium.org/p/chromium/issues/detail?id=1156408 */
72
- ScreenCaptureDevice[ScreenCaptureDevice["Canvas"] = 2] = "Canvas";
73
- /** When using Microphone only the voice will be sent */
74
- ScreenCaptureDevice[ScreenCaptureDevice["Microphone"] = 3] = "Microphone";
75
- })(ScreenCaptureDevice = exports.ScreenCaptureDevice || (exports.ScreenCaptureDevice = {}));
76
- var ScreenCaptureMode;
77
- (function (ScreenCaptureMode) {
78
- ScreenCaptureMode[ScreenCaptureMode["Idle"] = 0] = "Idle";
79
- ScreenCaptureMode[ScreenCaptureMode["Sending"] = 1] = "Sending";
80
- ScreenCaptureMode[ScreenCaptureMode["Receiving"] = 2] = "Receiving";
81
- })(ScreenCaptureMode = exports.ScreenCaptureMode || (exports.ScreenCaptureMode = {}));
82
- var ScreenCapture = /** @class */ (function (_super) {
83
- __extends(ScreenCapture, _super);
84
- function ScreenCapture() {
85
- var _this = _super !== null && _super.apply(this, arguments) || this;
86
- _this.allowStartOnClick = true;
87
- // TODO: make this a property
88
- /** Note: this can not be changed while streaming */
89
- _this.device = ScreenCaptureDevice.Screen;
90
- _this._requestOpen = false;
91
- _this._currentStream = null;
92
- _this._currentMode = ScreenCaptureMode.Idle;
93
- _this.onReceiveStream = function (evt) {
94
- var _a;
95
- if (((_a = evt.stream) === null || _a === void 0 ? void 0 : _a.active) !== true)
96
- return;
97
- _this.setStream(evt.stream, ScreenCaptureMode.Receiving);
98
- };
99
- _this.onCallEnded = function (_evt) {
100
- if (debug)
101
- console.log("CALL ENDED", _this.isReceiving, _this === null || _this === void 0 ? void 0 : _this.screenspace);
102
- if (_this.isReceiving)
103
- _this.screenspace = false;
104
- };
105
- return _this;
106
- // private _cameraSelectionWindow : Window | null = null;
107
- // private openWindowToSelectCamera(){
108
- // }
109
- }
110
- ScreenCapture.prototype.onPointerEnter = function () {
111
- if (!this.allowStartOnClick)
112
- return;
113
- this.context.input.setCursorPointer();
114
- };
115
- ScreenCapture.prototype.onPointerExit = function () {
116
- if (!this.allowStartOnClick)
117
- return;
118
- this.context.input.setCursorNormal();
119
- };
120
- ScreenCapture.prototype.onPointerClick = function (evt) {
121
- var _a;
122
- if (!this.allowStartOnClick)
123
- return;
124
- if (evt && evt.pointerId !== 0)
125
- return;
126
- if (this.context.connection.isInRoom === false)
127
- return;
128
- if (this.isReceiving && ((_a = this.videoPlayer) === null || _a === void 0 ? void 0 : _a.isPlaying)) {
129
- if (this.videoPlayer)
130
- this.videoPlayer.screenspace = !this.videoPlayer.screenspace;
131
- return;
132
- }
133
- if (this.isSending) {
134
- this.close();
135
- return;
136
- }
137
- this.share();
138
- };
139
- Object.defineProperty(ScreenCapture.prototype, "videoPlayer", {
140
- get: function () { return this._videoPlayer; },
141
- set: function (val) {
142
- if (this._videoPlayer && (this.isSending || this.isReceiving)) {
143
- this._videoPlayer.stop();
144
- }
145
- this._videoPlayer = val;
146
- if (this._videoPlayer && this._currentStream && (this.isSending || this.isReceiving)) {
147
- this._videoPlayer.setVideo(this._currentStream);
148
- }
149
- },
150
- enumerable: false,
151
- configurable: true
152
- });
153
- Object.defineProperty(ScreenCapture.prototype, "screenspace", {
154
- get: function () { var _a, _b; return (_b = (_a = this.videoPlayer) === null || _a === void 0 ? void 0 : _a.screenspace) !== null && _b !== void 0 ? _b : false; },
155
- set: function (v) { if (this.videoPlayer)
156
- this.videoPlayer.screenspace = v; },
157
- enumerable: false,
158
- configurable: true
159
- });
160
- Object.defineProperty(ScreenCapture.prototype, "currentScream", {
161
- get: function () {
162
- return this._currentStream;
163
- },
164
- enumerable: false,
165
- configurable: true
166
- });
167
- Object.defineProperty(ScreenCapture.prototype, "currentMode", {
168
- get: function () {
169
- return this._currentMode;
170
- },
171
- enumerable: false,
172
- configurable: true
173
- });
174
- Object.defineProperty(ScreenCapture.prototype, "isSending", {
175
- get: function () {
176
- var _a;
177
- return ((_a = this._currentStream) === null || _a === void 0 ? void 0 : _a.active) && this._currentMode === ScreenCaptureMode.Sending;
178
- },
179
- enumerable: false,
180
- configurable: true
181
- });
182
- Object.defineProperty(ScreenCapture.prototype, "isReceiving", {
183
- get: function () {
184
- if (this._currentMode === ScreenCaptureMode.Receiving) {
185
- if (!this._currentStream || this._currentStream.active === false)
186
- return false;
187
- // if any track is still live consider it active
188
- var tracks = this._currentStream.getTracks();
189
- for (var _i = 0, tracks_1 = tracks; _i < tracks_1.length; _i++) {
190
- var track = tracks_1[_i];
191
- if (track.readyState === "live")
192
- return true;
193
- }
194
- }
195
- return false;
196
- },
197
- enumerable: false,
198
- configurable: true
199
- });
200
- Object.defineProperty(ScreenCapture.prototype, "requiresVideoPlayer", {
201
- get: function () { return this.device !== ScreenCaptureDevice.Microphone; },
202
- enumerable: false,
203
- configurable: true
204
- });
205
- ScreenCapture.prototype.awake = function () {
206
- var _this = this;
207
- if (debug)
208
- console.log("Screensharing", this.name, this);
209
- AudioSource_js_1.AudioSource.registerWaitForAllowAudio(function () {
210
- if (_this.videoPlayer && _this._currentStream && _this._currentMode === ScreenCaptureMode.Receiving) {
211
- _this.videoPlayer.playInBackground = true;
212
- _this.videoPlayer.setVideo(_this._currentStream);
213
- }
214
- });
215
- var handle = engine_networking_streams_js_1.PeerHandle.getOrCreate(this.context, this.guid);
216
- this._net = new engine_networking_streams_js_1.NetworkedStreams(this.context, handle);
217
- };
218
- ScreenCapture.prototype.onEnable = function () {
219
- var _a, _b, _c;
220
- (_a = this._net) === null || _a === void 0 ? void 0 : _a.enable();
221
- //@ts-ignore
222
- (_b = this._net) === null || _b === void 0 ? void 0 : _b.addEventListener(engine_networking_streams_js_1.PeerEvent.ReceiveStream, this.onReceiveStream);
223
- //@ts-ignore
224
- (_c = this._net) === null || _c === void 0 ? void 0 : _c.addEventListener(engine_networking_streams_js_1.PeerEvent.CallEnded, this.onCallEnded);
225
- };
226
- ScreenCapture.prototype.onDisable = function () {
227
- var _a, _b, _c;
228
- //@ts-ignore
229
- (_a = this._net) === null || _a === void 0 ? void 0 : _a.removeEventListener(engine_networking_streams_js_1.PeerEvent.ReceiveStream, this.onReceiveStream);
230
- //@ts-ignore
231
- (_b = this._net) === null || _b === void 0 ? void 0 : _b.removeEventListener(engine_networking_streams_js_1.PeerEvent.CallEnded, this.onCallEnded);
232
- (_c = this._net) === null || _c === void 0 ? void 0 : _c.disable();
233
- this.close();
234
- };
235
- /** Call to begin screensharing */
236
- ScreenCapture.prototype.share = function (opts) {
237
- var _a, _b;
238
- return __awaiter(this, void 0, void 0, function () {
239
- var settings, displayMediaOptions, videoOptions, _c, myVideo, fps, stream, myStream, err_1;
240
- return __generator(this, function (_d) {
241
- switch (_d.label) {
242
- case 0:
243
- if (opts === null || opts === void 0 ? void 0 : opts.device)
244
- this.device = opts.device;
245
- if (!this.videoPlayer && this.requiresVideoPlayer) {
246
- if (!this._videoPlayer) {
247
- this._videoPlayer = (_a = Component_js_1.GameObject.getComponent(this.gameObject, VideoPlayer_js_1.VideoPlayer)) !== null && _a !== void 0 ? _a : undefined;
248
- }
249
- if (!this.videoPlayer) {
250
- console.warn("Can not share video without a videoPlayer assigned");
251
- return [2 /*return*/];
252
- }
253
- }
254
- this._requestOpen = true;
255
- _d.label = 1;
256
- case 1:
257
- _d.trys.push([1, 9, , 10]);
258
- settings = (_b = opts === null || opts === void 0 ? void 0 : opts.constraints) !== null && _b !== void 0 ? _b : {
259
- echoCancellation: true,
260
- autoGainControl: false
261
- };
262
- displayMediaOptions = {
263
- video: settings,
264
- audio: settings
265
- };
266
- videoOptions = displayMediaOptions.video;
267
- if (videoOptions !== undefined && typeof videoOptions !== "boolean") {
268
- // Set default video settings
269
- if (!videoOptions.width)
270
- videoOptions.width = { max: 1920 };
271
- if (!videoOptions.height)
272
- videoOptions.height = { max: 1920 };
273
- if (!videoOptions.aspectRatio)
274
- videoOptions.aspectRatio = { ideal: 1.7777777778 };
275
- if (!videoOptions.frameRate)
276
- videoOptions.frameRate = { ideal: 24 };
277
- if (!videoOptions.facingMode)
278
- videoOptions.facingMode = { ideal: "user" };
279
- }
280
- _c = this.device;
281
- switch (_c) {
282
- case ScreenCaptureDevice.Camera: return [3 /*break*/, 2];
283
- case ScreenCaptureDevice.Screen: return [3 /*break*/, 3];
284
- case ScreenCaptureDevice.Canvas: return [3 /*break*/, 5];
285
- case ScreenCaptureDevice.Microphone: return [3 /*break*/, 6];
286
- }
287
- return [3 /*break*/, 8];
288
- case 2:
289
- this.tryShareUserCamera(displayMediaOptions, opts);
290
- return [3 /*break*/, 8];
291
- case 3:
292
- if (!navigator.mediaDevices.getDisplayMedia) {
293
- console.error("No getDisplayMedia support");
294
- return [2 /*return*/];
295
- }
296
- return [4 /*yield*/, navigator.mediaDevices.getDisplayMedia(displayMediaOptions)];
297
- case 4:
298
- myVideo = _d.sent();
299
- if (this._requestOpen) {
300
- this.setStream(myVideo, ScreenCaptureMode.Sending);
301
- }
302
- else
303
- engine_networking_streams_js_1.disposeStream(myVideo);
304
- return [3 /*break*/, 8];
305
- case 5:
306
- fps = 0;
307
- stream = this.context.renderer.domElement.captureStream(fps);
308
- this.setStream(stream, ScreenCaptureMode.Sending);
309
- return [3 /*break*/, 8];
310
- case 6:
311
- if (!navigator.mediaDevices.getDisplayMedia) {
312
- console.error("No getDisplayMedia support");
313
- return [2 /*return*/];
314
- }
315
- displayMediaOptions.video = false;
316
- return [4 /*yield*/, navigator.mediaDevices.getUserMedia(displayMediaOptions)];
317
- case 7:
318
- myStream = _d.sent();
319
- if (this._requestOpen) {
320
- this.setStream(myStream, ScreenCaptureMode.Sending);
321
- }
322
- else
323
- engine_networking_streams_js_1.disposeStream(myStream);
324
- return [3 /*break*/, 8];
325
- case 8: return [3 /*break*/, 10];
326
- case 9:
327
- err_1 = _d.sent();
328
- if (err_1.name === "NotAllowedError") {
329
- // user cancelled stream selection
330
- console.log("Selection cancelled");
331
- this._requestOpen = false;
332
- return [2 /*return*/];
333
- }
334
- console.error("Error opening video", err_1);
335
- return [3 /*break*/, 10];
336
- case 10: return [2 /*return*/];
337
- }
338
- });
339
- });
340
- };
341
- ScreenCapture.prototype.close = function () {
342
- var _a;
343
- this._requestOpen = false;
344
- if (this._currentStream) {
345
- if (debug)
346
- console.warn("Close current stream / disposing resources, stream was active?", this._currentStream.active);
347
- (_a = this._net) === null || _a === void 0 ? void 0 : _a.stopSendingStream(this._currentStream);
348
- engine_networking_streams_js_1.disposeStream(this._currentStream);
349
- this._currentMode = ScreenCaptureMode.Idle;
350
- this._currentStream = null;
351
- }
352
- };
353
- ScreenCapture.prototype.setStream = function (stream, mode) {
354
- var _this = this;
355
- var _a, _b, _c;
356
- if (stream === this._currentStream)
357
- return;
358
- this.close();
359
- if (!stream)
360
- return;
361
- this._currentStream = stream;
362
- this._requestOpen = true;
363
- this._currentMode = mode;
364
- var isVideoStream = this.device !== ScreenCaptureDevice.Microphone;
365
- var isSending = mode === ScreenCaptureMode.Sending;
366
- if (isVideoStream) {
367
- if (this._videoPlayer)
368
- this._videoPlayer.setVideo(stream);
369
- else
370
- console.error("No video player assigned for video stream");
371
- }
372
- else {
373
- if (!this._audioSource) {
374
- this._audioSource = new AudioSource_js_1.AudioSource();
375
- this._audioSource.spatialBlend = 0;
376
- this._audioSource.volume = 1;
377
- this.gameObject.addComponent(this._audioSource);
378
- }
379
- if (!isSending) {
380
- console.log("PLAY", stream.getAudioTracks());
381
- this._audioSource.volume = 1;
382
- (_a = this._audioSource) === null || _a === void 0 ? void 0 : _a.play(stream);
383
- }
384
- }
385
- if (isSending) {
386
- (_b = this._net) === null || _b === void 0 ? void 0 : _b.startSendingStream(stream);
387
- }
388
- // Mute audio for the video we are sending
389
- if (isSending) {
390
- if (this._videoPlayer)
391
- this._videoPlayer.muted = true;
392
- (_c = this._audioSource) === null || _c === void 0 ? void 0 : _c.stop();
393
- }
394
- var _loop_1 = function (track) {
395
- track.addEventListener("ended", function () {
396
- if (debug)
397
- console.log("Track ended", track);
398
- _this.close();
399
- });
400
- if (debug) {
401
- if (track.kind === "video") {
402
- if (isSending)
403
- console.log("Video →", track.getSettings());
404
- else
405
- console.log("Video ←", track.getSettings());
406
- }
407
- }
408
- };
409
- for (var _i = 0, _d = stream.getTracks(); _i < _d.length; _i++) {
410
- var track = _d[_i];
411
- _loop_1(track);
412
- }
413
- };
414
- ScreenCapture.prototype.tryShareUserCamera = function (opts, options) {
415
- var _a;
416
- return __awaiter(this, void 0, void 0, function () {
417
- var devices, _i, devices_1, dev, id, useDevice, userMedia, err_2;
418
- return __generator(this, function (_b) {
419
- switch (_b.label) {
420
- case 0: return [4 /*yield*/, navigator.mediaDevices.enumerateDevices()];
421
- case 1:
422
- devices = (_b.sent()).filter(function (d) { return d.kind === "videoinput"; });
423
- if (debug)
424
- console.log("Request camera", devices);
425
- _i = 0, devices_1 = devices;
426
- _b.label = 2;
427
- case 2:
428
- if (!(_i < devices_1.length)) return [3 /*break*/, 7];
429
- dev = devices_1[_i];
430
- _b.label = 3;
431
- case 3:
432
- _b.trys.push([3, 5, , 6]);
433
- if (!this._requestOpen)
434
- return [3 /*break*/, 7];
435
- if (dev.kind !== "videoinput")
436
- return [3 /*break*/, 6];
437
- id = dev.deviceId;
438
- if ((options === null || options === void 0 ? void 0 : options.deviceId) !== undefined) {
439
- if (id !== options.deviceId)
440
- return [3 /*break*/, 6];
441
- }
442
- useDevice = (_a = options === null || options === void 0 ? void 0 : options.deviceFilter) === null || _a === void 0 ? void 0 : _a.call(this, dev);
443
- if (useDevice === false)
444
- return [3 /*break*/, 6];
445
- if (opts.video !== false) {
446
- if (typeof opts.video === "undefined" || typeof opts.video === "boolean") {
447
- opts.video = {};
448
- }
449
- opts.video.deviceId = id;
450
- }
451
- return [4 /*yield*/, navigator.mediaDevices.getUserMedia(opts)];
452
- case 4:
453
- userMedia = _b.sent();
454
- if (this._requestOpen) {
455
- this.setStream(userMedia, ScreenCaptureMode.Sending);
456
- }
457
- else
458
- engine_networking_streams_js_1.disposeStream(userMedia);
459
- if (debug)
460
- console.log("Selected camera", dev);
461
- return [3 /*break*/, 7];
462
- case 5:
463
- err_2 = _b.sent();
464
- // The first message is Firefox's, the second is Chrome's, when the video source is already in use by another app
465
- if (err_2.message === "Failed to allocate videosource" || err_2.message === "Could not start video source") {
466
- index_js_1.showBalloonWarning("Failed to start video: Try another camera (Code " + err_2.code + ")");
467
- console.warn(err_2);
468
- return [3 /*break*/, 6];
469
- }
470
- else {
471
- console.error("Failed to get user media", err_2.message, err_2.code, err_2);
472
- }
473
- return [3 /*break*/, 6];
474
- case 6:
475
- _i++;
476
- return [3 /*break*/, 2];
477
- case 7: return [2 /*return*/];
478
- }
479
- });
480
- });
481
- };
482
- __decorate([
483
- engine_serialization_js_1.serializable(VideoPlayer_js_1.VideoPlayer)
484
- ], ScreenCapture.prototype, "videoPlayer");
485
- __decorate([
486
- engine_serialization_js_1.serializable()
487
- ], ScreenCapture.prototype, "device");
488
- return ScreenCapture;
489
- }(Component_js_1.Behaviour));
490
- exports.ScreenCapture = ScreenCapture;
src/engine-schemes/dist/synced-camera-model.js DELETED
@@ -1,74 +0,0 @@
1
- "use strict";
2
- // automatically generated by the FlatBuffers compiler, do not modify
3
- exports.__esModule = true;
4
- exports.SyncedCameraModel = void 0;
5
- var flatbuffers = require("flatbuffers");
6
- var vec3_js_1 = require("./vec3.js");
7
- var SyncedCameraModel = /** @class */ (function () {
8
- function SyncedCameraModel() {
9
- this.bb = null;
10
- this.bb_pos = 0;
11
- }
12
- SyncedCameraModel.prototype.__init = function (i, bb) {
13
- this.bb_pos = i;
14
- this.bb = bb;
15
- return this;
16
- };
17
- SyncedCameraModel.getRootAsSyncedCameraModel = function (bb, obj) {
18
- return (obj || new SyncedCameraModel()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
19
- };
20
- SyncedCameraModel.getSizePrefixedRootAsSyncedCameraModel = function (bb, obj) {
21
- bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
22
- return (obj || new SyncedCameraModel()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
23
- };
24
- SyncedCameraModel.prototype.userId = function (optionalEncoding) {
25
- var offset = this.bb.__offset(this.bb_pos, 4);
26
- return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
27
- };
28
- SyncedCameraModel.prototype.guid = function (optionalEncoding) {
29
- var offset = this.bb.__offset(this.bb_pos, 6);
30
- return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
31
- };
32
- SyncedCameraModel.prototype.dontSave = function () {
33
- var offset = this.bb.__offset(this.bb_pos, 8);
34
- return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
35
- };
36
- SyncedCameraModel.prototype.pos = function (obj) {
37
- var offset = this.bb.__offset(this.bb_pos, 10);
38
- return offset ? (obj || new vec3_js_1.Vec3()).__init(this.bb_pos + offset, this.bb) : null;
39
- };
40
- SyncedCameraModel.prototype.rot = function (obj) {
41
- var offset = this.bb.__offset(this.bb_pos, 12);
42
- return offset ? (obj || new vec3_js_1.Vec3()).__init(this.bb_pos + offset, this.bb) : null;
43
- };
44
- SyncedCameraModel.startSyncedCameraModel = function (builder) {
45
- builder.startObject(5);
46
- };
47
- SyncedCameraModel.addUserId = function (builder, userIdOffset) {
48
- builder.addFieldOffset(0, userIdOffset, 0);
49
- };
50
- SyncedCameraModel.addGuid = function (builder, guidOffset) {
51
- builder.addFieldOffset(1, guidOffset, 0);
52
- };
53
- SyncedCameraModel.addDontSave = function (builder, dontSave) {
54
- builder.addFieldInt8(2, +dontSave, +false);
55
- };
56
- SyncedCameraModel.addPos = function (builder, posOffset) {
57
- builder.addFieldStruct(3, posOffset, 0);
58
- };
59
- SyncedCameraModel.addRot = function (builder, rotOffset) {
60
- builder.addFieldStruct(4, rotOffset, 0);
61
- };
62
- SyncedCameraModel.endSyncedCameraModel = function (builder) {
63
- var offset = builder.endObject();
64
- return offset;
65
- };
66
- SyncedCameraModel.finishSyncedCameraModelBuffer = function (builder, offset) {
67
- builder.finish(offset);
68
- };
69
- SyncedCameraModel.finishSizePrefixedSyncedCameraModelBuffer = function (builder, offset) {
70
- builder.finish(offset, undefined, true);
71
- };
72
- return SyncedCameraModel;
73
- }());
74
- exports.SyncedCameraModel = SyncedCameraModel;
src/engine-schemes/dist/synced-transform-model.js DELETED
@@ -1,73 +0,0 @@
1
- "use strict";
2
- // automatically generated by the FlatBuffers compiler, do not modify
3
- exports.__esModule = true;
4
- exports.SyncedTransformModel = void 0;
5
- var flatbuffers = require("flatbuffers");
6
- var transform_js_1 = require("./transform.js");
7
- var SyncedTransformModel = /** @class */ (function () {
8
- function SyncedTransformModel() {
9
- this.bb = null;
10
- this.bb_pos = 0;
11
- }
12
- SyncedTransformModel.prototype.__init = function (i, bb) {
13
- this.bb_pos = i;
14
- this.bb = bb;
15
- return this;
16
- };
17
- SyncedTransformModel.getRootAsSyncedTransformModel = function (bb, obj) {
18
- return (obj || new SyncedTransformModel()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
19
- };
20
- SyncedTransformModel.getSizePrefixedRootAsSyncedTransformModel = function (bb, obj) {
21
- bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
22
- return (obj || new SyncedTransformModel()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
23
- };
24
- SyncedTransformModel.prototype.guid = function (optionalEncoding) {
25
- var offset = this.bb.__offset(this.bb_pos, 4);
26
- return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
27
- };
28
- /**
29
- * if the transform interpolation should be fast, this is true when the send interval is low and we want to have snappy transforms
30
- */
31
- SyncedTransformModel.prototype.fast = function () {
32
- var offset = this.bb.__offset(this.bb_pos, 6);
33
- return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
34
- };
35
- SyncedTransformModel.prototype.transform = function (obj) {
36
- var offset = this.bb.__offset(this.bb_pos, 8);
37
- return offset ? (obj || new transform_js_1.Transform()).__init(this.bb_pos + offset, this.bb) : null;
38
- };
39
- /**
40
- * if the server should not save this info
41
- */
42
- SyncedTransformModel.prototype.dontSave = function () {
43
- var offset = this.bb.__offset(this.bb_pos, 10);
44
- return offset ? !!this.bb.readInt8(this.bb_pos + offset) : false;
45
- };
46
- SyncedTransformModel.startSyncedTransformModel = function (builder) {
47
- builder.startObject(4);
48
- };
49
- SyncedTransformModel.addGuid = function (builder, guidOffset) {
50
- builder.addFieldOffset(0, guidOffset, 0);
51
- };
52
- SyncedTransformModel.addFast = function (builder, fast) {
53
- builder.addFieldInt8(1, +fast, +false);
54
- };
55
- SyncedTransformModel.addTransform = function (builder, transformOffset) {
56
- builder.addFieldStruct(2, transformOffset, 0);
57
- };
58
- SyncedTransformModel.addDontSave = function (builder, dontSave) {
59
- builder.addFieldInt8(3, +dontSave, +false);
60
- };
61
- SyncedTransformModel.endSyncedTransformModel = function (builder) {
62
- var offset = builder.endObject();
63
- return offset;
64
- };
65
- SyncedTransformModel.finishSyncedTransformModelBuffer = function (builder, offset) {
66
- builder.finish(offset);
67
- };
68
- SyncedTransformModel.finishSizePrefixedSyncedTransformModelBuffer = function (builder, offset) {
69
- builder.finish(offset, undefined, true);
70
- };
71
- return SyncedTransformModel;
72
- }());
73
- exports.SyncedTransformModel = SyncedTransformModel;
src/engine-schemes/dist/transform.js DELETED
@@ -1,46 +0,0 @@
1
- "use strict";
2
- // automatically generated by the FlatBuffers compiler, do not modify
3
- exports.__esModule = true;
4
- exports.Transform = void 0;
5
- var vec3_js_1 = require("./vec3.js");
6
- var Transform = /** @class */ (function () {
7
- function Transform() {
8
- this.bb = null;
9
- this.bb_pos = 0;
10
- }
11
- Transform.prototype.__init = function (i, bb) {
12
- this.bb_pos = i;
13
- this.bb = bb;
14
- return this;
15
- };
16
- Transform.prototype.position = function (obj) {
17
- return (obj || new vec3_js_1.Vec3()).__init(this.bb_pos, this.bb);
18
- };
19
- Transform.prototype.rotation = function (obj) {
20
- return (obj || new vec3_js_1.Vec3()).__init(this.bb_pos + 12, this.bb);
21
- };
22
- Transform.prototype.scale = function (obj) {
23
- return (obj || new vec3_js_1.Vec3()).__init(this.bb_pos + 24, this.bb);
24
- };
25
- Transform.sizeOf = function () {
26
- return 36;
27
- };
28
- Transform.createTransform = function (builder, position_x, position_y, position_z, rotation_x, rotation_y, rotation_z, scale_x, scale_y, scale_z) {
29
- builder.prep(4, 36);
30
- builder.prep(4, 12);
31
- builder.writeFloat32(scale_z);
32
- builder.writeFloat32(scale_y);
33
- builder.writeFloat32(scale_x);
34
- builder.prep(4, 12);
35
- builder.writeFloat32(rotation_z);
36
- builder.writeFloat32(rotation_y);
37
- builder.writeFloat32(rotation_x);
38
- builder.prep(4, 12);
39
- builder.writeFloat32(position_z);
40
- builder.writeFloat32(position_y);
41
- builder.writeFloat32(position_x);
42
- return builder.offset();
43
- };
44
- return Transform;
45
- }());
46
- exports.Transform = Transform;
src/engine-schemes/dist/vec2.js DELETED
@@ -1,32 +0,0 @@
1
- "use strict";
2
- // automatically generated by the FlatBuffers compiler, do not modify
3
- exports.__esModule = true;
4
- exports.Vec2 = void 0;
5
- var Vec2 = /** @class */ (function () {
6
- function Vec2() {
7
- this.bb = null;
8
- this.bb_pos = 0;
9
- }
10
- Vec2.prototype.__init = function (i, bb) {
11
- this.bb_pos = i;
12
- this.bb = bb;
13
- return this;
14
- };
15
- Vec2.prototype.x = function () {
16
- return this.bb.readFloat32(this.bb_pos);
17
- };
18
- Vec2.prototype.y = function () {
19
- return this.bb.readFloat32(this.bb_pos + 4);
20
- };
21
- Vec2.sizeOf = function () {
22
- return 8;
23
- };
24
- Vec2.createVec2 = function (builder, x, y) {
25
- builder.prep(4, 8);
26
- builder.writeFloat32(y);
27
- builder.writeFloat32(x);
28
- return builder.offset();
29
- };
30
- return Vec2;
31
- }());
32
- exports.Vec2 = Vec2;
src/engine-schemes/dist/vec3.js DELETED
@@ -1,36 +0,0 @@
1
- "use strict";
2
- // automatically generated by the FlatBuffers compiler, do not modify
3
- exports.__esModule = true;
4
- exports.Vec3 = void 0;
5
- var Vec3 = /** @class */ (function () {
6
- function Vec3() {
7
- this.bb = null;
8
- this.bb_pos = 0;
9
- }
10
- Vec3.prototype.__init = function (i, bb) {
11
- this.bb_pos = i;
12
- this.bb = bb;
13
- return this;
14
- };
15
- Vec3.prototype.x = function () {
16
- return this.bb.readFloat32(this.bb_pos);
17
- };
18
- Vec3.prototype.y = function () {
19
- return this.bb.readFloat32(this.bb_pos + 4);
20
- };
21
- Vec3.prototype.z = function () {
22
- return this.bb.readFloat32(this.bb_pos + 8);
23
- };
24
- Vec3.sizeOf = function () {
25
- return 12;
26
- };
27
- Vec3.createVec3 = function (builder, x, y, z) {
28
- builder.prep(4, 12);
29
- builder.writeFloat32(z);
30
- builder.writeFloat32(y);
31
- builder.writeFloat32(x);
32
- return builder.offset();
33
- };
34
- return Vec3;
35
- }());
36
- exports.Vec3 = Vec3;
src/engine-schemes/dist/vec4.js DELETED
@@ -1,40 +0,0 @@
1
- "use strict";
2
- // automatically generated by the FlatBuffers compiler, do not modify
3
- exports.__esModule = true;
4
- exports.Vec4 = void 0;
5
- var Vec4 = /** @class */ (function () {
6
- function Vec4() {
7
- this.bb = null;
8
- this.bb_pos = 0;
9
- }
10
- Vec4.prototype.__init = function (i, bb) {
11
- this.bb_pos = i;
12
- this.bb = bb;
13
- return this;
14
- };
15
- Vec4.prototype.x = function () {
16
- return this.bb.readFloat32(this.bb_pos);
17
- };
18
- Vec4.prototype.y = function () {
19
- return this.bb.readFloat32(this.bb_pos + 4);
20
- };
21
- Vec4.prototype.z = function () {
22
- return this.bb.readFloat32(this.bb_pos + 8);
23
- };
24
- Vec4.prototype.w = function () {
25
- return this.bb.readFloat32(this.bb_pos + 12);
26
- };
27
- Vec4.sizeOf = function () {
28
- return 16;
29
- };
30
- Vec4.createVec4 = function (builder, x, y, z, w) {
31
- builder.prep(4, 16);
32
- builder.writeFloat32(w);
33
- builder.writeFloat32(z);
34
- builder.writeFloat32(y);
35
- builder.writeFloat32(x);
36
- return builder.offset();
37
- };
38
- return Vec4;
39
- }());
40
- exports.Vec4 = Vec4;
src/engine-components/dist/VideoPlayer.js DELETED
@@ -1,888 +0,0 @@
1
- "use strict";
2
- var __extends = (this && this.__extends) || (function () {
3
- var extendStatics = function (d, b) {
4
- extendStatics = Object.setPrototypeOf ||
5
- ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
6
- function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
7
- return extendStatics(d, b);
8
- };
9
- return function (d, b) {
10
- extendStatics(d, b);
11
- function __() { this.constructor = d; }
12
- d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
13
- };
14
- })();
15
- var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
16
- var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
17
- if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
18
- else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
19
- return c > 3 && r && Object.defineProperty(target, key, r), r;
20
- };
21
- var __generator = (this && this.__generator) || function (thisArg, body) {
22
- var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
23
- return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
24
- function verb(n) { return function (v) { return step([n, v]); }; }
25
- function step(op) {
26
- if (f) throw new TypeError("Generator is already executing.");
27
- while (_) try {
28
- if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
29
- if (y = 0, t) op = [op[0] & 2, t.value];
30
- switch (op[0]) {
31
- case 0: case 1: t = op; break;
32
- case 4: _.label++; return { value: op[1], done: false };
33
- case 5: _.label++; y = op[1]; op = [0]; continue;
34
- case 7: op = _.ops.pop(); _.trys.pop(); continue;
35
- default:
36
- if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
37
- if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
38
- if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
39
- if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
40
- if (t[2]) _.ops.pop();
41
- _.trys.pop(); continue;
42
- }
43
- op = body.call(thisArg, _);
44
- } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
45
- if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
46
- }
47
- };
48
- exports.__esModule = true;
49
- exports.VideoPlayer = exports.VideoRenderMode = exports.VideoAudioOutputMode = exports.VideoSource = exports.AspectMode = void 0;
50
- var Component_js_1 = require("./Component.js");
51
- var engine_serialization_decorator_js_1 = require("../engine/engine_serialization_decorator.js");
52
- var three_1 = require("three");
53
- var engine_input_utils_js_1 = require("../engine/engine_input_utils.js");
54
- var engine_utils_js_1 = require("../engine/engine_utils.js");
55
- var Renderer_js_1 = require("./Renderer.js");
56
- var engine_three_utils_js_1 = require("../engine/engine_three_utils.js");
57
- var engine_create_objects_js_1 = require("../engine/engine_create_objects.js");
58
- var index_js_1 = require("../engine/debug/index.js");
59
- var debug = engine_utils_js_1.getParam("debugvideo");
60
- var AspectMode;
61
- (function (AspectMode) {
62
- AspectMode[AspectMode["None"] = 0] = "None";
63
- AspectMode[AspectMode["AdjustHeight"] = 1] = "AdjustHeight";
64
- AspectMode[AspectMode["AdjustWidth"] = 2] = "AdjustWidth";
65
- })(AspectMode = exports.AspectMode || (exports.AspectMode = {}));
66
- var VideoSource;
67
- (function (VideoSource) {
68
- /// <summary>
69
- /// <para>Use the current clip as the video content source.</para>
70
- /// </summary>
71
- VideoSource[VideoSource["VideoClip"] = 0] = "VideoClip";
72
- /// <summary>
73
- /// <para>Use the current URL as the video content source.</para>
74
- /// </summary>
75
- VideoSource[VideoSource["Url"] = 1] = "Url";
76
- })(VideoSource = exports.VideoSource || (exports.VideoSource = {}));
77
- var VideoAudioOutputMode;
78
- (function (VideoAudioOutputMode) {
79
- VideoAudioOutputMode[VideoAudioOutputMode["None"] = 0] = "None";
80
- VideoAudioOutputMode[VideoAudioOutputMode["AudioSource"] = 1] = "AudioSource";
81
- VideoAudioOutputMode[VideoAudioOutputMode["Direct"] = 2] = "Direct";
82
- VideoAudioOutputMode[VideoAudioOutputMode["APIOnly"] = 3] = "APIOnly";
83
- })(VideoAudioOutputMode = exports.VideoAudioOutputMode || (exports.VideoAudioOutputMode = {}));
84
- var VideoRenderMode;
85
- (function (VideoRenderMode) {
86
- VideoRenderMode[VideoRenderMode["CameraFarPlane"] = 0] = "CameraFarPlane";
87
- VideoRenderMode[VideoRenderMode["CameraNearPlane"] = 1] = "CameraNearPlane";
88
- VideoRenderMode[VideoRenderMode["RenderTexture"] = 2] = "RenderTexture";
89
- VideoRenderMode[VideoRenderMode["MaterialOverride"] = 3] = "MaterialOverride";
90
- })(VideoRenderMode = exports.VideoRenderMode || (exports.VideoRenderMode = {}));
91
- var VideoPlayer = /** @class */ (function (_super) {
92
- __extends(VideoPlayer, _super);
93
- function VideoPlayer() {
94
- var _this = _super.call(this) || this;
95
- _this.playOnAwake = true;
96
- _this.aspectMode = AspectMode.None;
97
- _this.clip = null;
98
- _this.time = 0;
99
- _this._playbackSpeed = 1;
100
- _this._isLooping = false;
101
- _this._muted = false;
102
- _this._audioOutputMode = VideoAudioOutputMode.Direct;
103
- /** Set this to false to pause video playback while the tab is not active */
104
- _this.playInBackground = true;
105
- _this._crossOrigin = "anonymous";
106
- // set a default src, this should not be undefined
107
- _this.source = VideoSource.Url;
108
- _this.url = null;
109
- _this._videoElement = null;
110
- _this._videoTexture = null;
111
- _this._videoMaterial = null;
112
- _this._isPlaying = false;
113
- _this.wasPlaying = false;
114
- _this.visibilityChanged = function (_) {
115
- switch (document.visibilityState) {
116
- case "hidden":
117
- if (!_this.playInBackground) {
118
- _this.wasPlaying = _this._isPlaying;
119
- _this.pause();
120
- }
121
- break;
122
- case "visible":
123
- if (_this.wasPlaying && !_this._isPlaying)
124
- _this.play();
125
- break;
126
- }
127
- };
128
- _this._receivedInput = false;
129
- _this._overlay = null;
130
- _this._updateAspectRoutineId = -1;
131
- engine_input_utils_js_1.awaitInput(function () {
132
- _this._receivedInput = true;
133
- _this.updateVideoElementSettings();
134
- });
135
- _this._targetObjects = [];
136
- if (engine_utils_js_1.getParam("videoscreenspace")) {
137
- window.addEventListener("keydown", function (evt) {
138
- if (evt.key === "f") {
139
- _this.screenspace = !_this.screenspace;
140
- }
141
- });
142
- }
143
- return _this;
144
- }
145
- Object.defineProperty(VideoPlayer.prototype, "playbackSpeed", {
146
- get: function () {
147
- var _a, _b;
148
- return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.playbackRate) !== null && _b !== void 0 ? _b : this._playbackSpeed;
149
- },
150
- set: function (val) {
151
- this._playbackSpeed = val;
152
- if (this._videoElement)
153
- this._videoElement.playbackRate = val;
154
- },
155
- enumerable: false,
156
- configurable: true
157
- });
158
- Object.defineProperty(VideoPlayer.prototype, "isLooping", {
159
- get: function () {
160
- var _a, _b;
161
- return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.loop) !== null && _b !== void 0 ? _b : this._isLooping;
162
- },
163
- set: function (val) {
164
- this._isLooping = val;
165
- if (this._videoElement)
166
- this._videoElement.loop = val;
167
- },
168
- enumerable: false,
169
- configurable: true
170
- });
171
- Object.defineProperty(VideoPlayer.prototype, "currentTime", {
172
- get: function () {
173
- var _a, _b;
174
- return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.currentTime) !== null && _b !== void 0 ? _b : this.time;
175
- },
176
- set: function (val) {
177
- if (this._videoElement) {
178
- this._videoElement.currentTime = val;
179
- }
180
- else
181
- this.time = val;
182
- },
183
- enumerable: false,
184
- configurable: true
185
- });
186
- Object.defineProperty(VideoPlayer.prototype, "isPlaying", {
187
- get: function () {
188
- var video = this._videoElement;
189
- if (video) {
190
- if (video.currentTime > 0 && !video.paused && !video.ended
191
- && video.readyState > video.HAVE_CURRENT_DATA)
192
- return true;
193
- else if (video.srcObject) {
194
- var stream = video.srcObject;
195
- if (stream.active)
196
- return true;
197
- }
198
- }
199
- return false;
200
- },
201
- enumerable: false,
202
- configurable: true
203
- });
204
- Object.defineProperty(VideoPlayer.prototype, "crossOrigin", {
205
- get: function () {
206
- var _a, _b;
207
- return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.crossOrigin) !== null && _b !== void 0 ? _b : this._crossOrigin;
208
- },
209
- set: function (val) {
210
- this._crossOrigin = val;
211
- if (this._videoElement) {
212
- if (val !== null)
213
- this._videoElement.setAttribute("crossorigin", val);
214
- else
215
- this._videoElement.removeAttribute("crossorigin");
216
- }
217
- },
218
- enumerable: false,
219
- configurable: true
220
- });
221
- Object.defineProperty(VideoPlayer.prototype, "videoMaterial", {
222
- get: function () {
223
- return this._videoMaterial;
224
- },
225
- enumerable: false,
226
- configurable: true
227
- });
228
- Object.defineProperty(VideoPlayer.prototype, "videoTexture", {
229
- get: function () {
230
- return this._videoTexture;
231
- },
232
- enumerable: false,
233
- configurable: true
234
- });
235
- Object.defineProperty(VideoPlayer.prototype, "videoElement", {
236
- get: function () {
237
- return this._videoElement;
238
- },
239
- enumerable: false,
240
- configurable: true
241
- });
242
- Object.defineProperty(VideoPlayer.prototype, "muted", {
243
- get: function () {
244
- var _a, _b;
245
- return (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.muted) !== null && _b !== void 0 ? _b : this._muted;
246
- },
247
- set: function (val) {
248
- this._muted = val;
249
- if (this._videoElement)
250
- this._videoElement.muted = val;
251
- },
252
- enumerable: false,
253
- configurable: true
254
- });
255
- Object.defineProperty(VideoPlayer.prototype, "audioOutputMode", {
256
- get: function () { return this._audioOutputMode; },
257
- set: function (mode) {
258
- if (mode !== this._audioOutputMode) {
259
- if (mode === VideoAudioOutputMode.AudioSource && index_js_1.isDevEnvironment())
260
- console.warn("VideoAudioOutputMode.AudioSource is not yet implemented");
261
- this._audioOutputMode = mode;
262
- this.updateVideoElementSettings();
263
- }
264
- },
265
- enumerable: false,
266
- configurable: true
267
- });
268
- VideoPlayer.prototype.setVideo = function (video) {
269
- this.clip = video;
270
- this.source = VideoSource.VideoClip;
271
- if (!this._videoElement)
272
- this.create(true);
273
- else {
274
- // TODO: how to prevent interruption error when another video is already playing
275
- this._videoElement.srcObject = video;
276
- if (this._isPlaying)
277
- this.play();
278
- this.updateAspect();
279
- }
280
- };
281
- VideoPlayer.prototype.setClipURL = function (url) {
282
- if (this.url === url)
283
- return;
284
- // console.log("SET URL", url);
285
- this.url = url;
286
- this.source = VideoSource.Url;
287
- if (debug)
288
- console.log("set url", url);
289
- if (!this._videoElement)
290
- this.create(true);
291
- else {
292
- this._videoElement.src = url;
293
- if (this._isPlaying) {
294
- this.stop();
295
- this.play();
296
- }
297
- }
298
- };
299
- VideoPlayer.prototype.onEnable = function () {
300
- var _a, _b;
301
- if (debug)
302
- console.log("VideoPlayer.onEnable", this);
303
- window.addEventListener('visibilitychange', this.visibilityChanged);
304
- if (this.playOnAwake === true) {
305
- this.create(true);
306
- }
307
- if (this.screenspace) {
308
- (_a = this._overlay) === null || _a === void 0 ? void 0 : _a.start();
309
- }
310
- else
311
- (_b = this._overlay) === null || _b === void 0 ? void 0 : _b.stop();
312
- };
313
- VideoPlayer.prototype.onDisable = function () {
314
- var _a;
315
- window.removeEventListener('visibilitychange', this.visibilityChanged);
316
- (_a = this._overlay) === null || _a === void 0 ? void 0 : _a.stop();
317
- this.pause();
318
- };
319
- VideoPlayer.prototype.onDestroy = function () {
320
- var _a;
321
- if (this._videoElement) {
322
- (_a = this._videoElement.parentElement) === null || _a === void 0 ? void 0 : _a.removeChild(this._videoElement);
323
- this._videoElement = null;
324
- }
325
- if (this._videoTexture) {
326
- this._videoTexture.dispose();
327
- this._videoTexture = null;
328
- }
329
- };
330
- VideoPlayer.prototype.play = function () {
331
- var _this = this;
332
- var _a, _b;
333
- if (!this._videoElement)
334
- this.create(false);
335
- if (!this._videoElement)
336
- return;
337
- if (this._isPlaying && !((_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.ended) && !((_b = this._videoElement) === null || _b === void 0 ? void 0 : _b.paused))
338
- return;
339
- this._isPlaying = true;
340
- if (!this._receivedInput)
341
- this._videoElement.muted = true;
342
- this.updateVideoElementSettings();
343
- this._videoElement.currentTime = this.time;
344
- this._videoElement.play()["catch"](function (err) {
345
- var _a;
346
- console.log(err);
347
- // https://developer.chrome.com/blog/play-request-was-interrupted/
348
- if (debug)
349
- console.error("Error playing video", err, "CODE=" + err.code, (_a = _this.videoElement) === null || _a === void 0 ? void 0 : _a.src, _this);
350
- setTimeout(function () {
351
- if (_this._isPlaying && !_this.destroyed && _this.activeAndEnabled)
352
- _this.play();
353
- }, 1000);
354
- });
355
- if (debug)
356
- console.log("play", this._videoElement, this.time);
357
- };
358
- VideoPlayer.prototype.stop = function () {
359
- this._isPlaying = false;
360
- this.time = 0;
361
- if (!this._videoElement)
362
- return;
363
- this._videoElement.currentTime = 0;
364
- this._videoElement.pause();
365
- if (debug)
366
- console.log("STOP", this);
367
- };
368
- VideoPlayer.prototype.pause = function () {
369
- var _a, _b, _c;
370
- this.time = (_b = (_a = this._videoElement) === null || _a === void 0 ? void 0 : _a.currentTime) !== null && _b !== void 0 ? _b : 0;
371
- this._isPlaying = false;
372
- (_c = this._videoElement) === null || _c === void 0 ? void 0 : _c.pause();
373
- if (debug)
374
- console.log("PAUSE", this, this.currentTime);
375
- };
376
- VideoPlayer.prototype.create = function (playAutomatically) {
377
- var _a;
378
- var src;
379
- switch (this.source) {
380
- case VideoSource.VideoClip:
381
- src = this.clip;
382
- break;
383
- case VideoSource.Url:
384
- src = this.url;
385
- if (!(src === null || src === void 0 ? void 0 : src.length) && typeof this.clip === "string")
386
- src = this.clip;
387
- break;
388
- }
389
- if (!src) {
390
- if (debug)
391
- console.warn("No video source set", this);
392
- return;
393
- }
394
- if (!this._videoElement) {
395
- if (debug)
396
- console.warn("Create VideoElement", this);
397
- this._videoElement = this.createVideoElement();
398
- (_a = this.context.domElement) === null || _a === void 0 ? void 0 : _a.prepend(this._videoElement);
399
- // hide it because otherwise it would overlay the website with default css
400
- this.updateVideoElementStyles();
401
- }
402
- if (typeof src === "string") {
403
- if (debug)
404
- console.log("Set Video src", src);
405
- this._videoElement.src = src;
406
- // Not sure why we did this here, but with this code the video does not restart when being paused or when enabled is toggled
407
- // const str = this._videoElement["captureStream"]?.call(this._videoElement);
408
- // this.clip = str;
409
- }
410
- else {
411
- if (debug)
412
- console.log("Set Video srcObject", src);
413
- this._videoElement.srcObject = src;
414
- }
415
- if (!this._videoTexture)
416
- this._videoTexture = new three_1.VideoTexture(this._videoElement);
417
- this._videoTexture.flipY = false;
418
- this._videoTexture.colorSpace = three_1.SRGBColorSpace;
419
- this.handleBeginPlaying(playAutomatically);
420
- if (debug)
421
- console.log(this, playAutomatically);
422
- };
423
- VideoPlayer.prototype.updateAspect = function () {
424
- if (this.aspectMode === AspectMode.None)
425
- return;
426
- this.startCoroutine(this.updateAspectImpl());
427
- };
428
- Object.defineProperty(VideoPlayer.prototype, "screenspace", {
429
- get: function () {
430
- var _a, _b;
431
- return (_b = (_a = this._overlay) === null || _a === void 0 ? void 0 : _a.enabled) !== null && _b !== void 0 ? _b : false;
432
- },
433
- set: function (val) {
434
- var _a;
435
- if (val) {
436
- if (!this._videoTexture)
437
- return;
438
- if (!this._overlay)
439
- this._overlay = new VideoOverlay(this.context);
440
- this._overlay.add(this._videoTexture);
441
- }
442
- else
443
- (_a = this._overlay) === null || _a === void 0 ? void 0 : _a.remove(this._videoTexture);
444
- if (this._overlay)
445
- this._overlay.enabled = val;
446
- },
447
- enumerable: false,
448
- configurable: true
449
- });
450
- VideoPlayer.prototype.createVideoElement = function () {
451
- var video = document.createElement("video");
452
- if (this._crossOrigin)
453
- video.setAttribute("crossorigin", this._crossOrigin);
454
- if (debug)
455
- console.log("created video element", video);
456
- return video;
457
- };
458
- VideoPlayer.prototype.handleBeginPlaying = function (playAutomatically) {
459
- var _a, _b;
460
- if (!this.enabled)
461
- return;
462
- if (!this._videoElement)
463
- return;
464
- this._targetObjects.length = 0;
465
- var target = this.gameObject;
466
- switch (this.renderMode) {
467
- case VideoRenderMode.MaterialOverride:
468
- target = (_a = this.targetMaterialRenderer) === null || _a === void 0 ? void 0 : _a.gameObject;
469
- if (!target)
470
- target = (_b = Component_js_1.GameObject.getComponent(this.gameObject, Renderer_js_1.Renderer)) === null || _b === void 0 ? void 0 : _b.gameObject;
471
- break;
472
- case VideoRenderMode.RenderTexture:
473
- console.error("VideoPlayer renderTexture not implemented yet. Please use material override instead");
474
- return;
475
- }
476
- if (!target) {
477
- console.error("Missing target for video material renderer", this.name, VideoRenderMode[this.renderMode], this);
478
- return;
479
- }
480
- var mat = target["material"];
481
- if (mat) {
482
- this._targetObjects.push(target);
483
- if (mat !== this._videoMaterial) {
484
- this._videoMaterial = mat.clone();
485
- target["material"] = this._videoMaterial;
486
- }
487
- var fieldName = "map";
488
- var videoMaterial = this._videoMaterial;
489
- if (!this.targetMaterialProperty) {
490
- videoMaterial[fieldName] = this._videoTexture;
491
- }
492
- else {
493
- switch (this.targetMaterialProperty) {
494
- default:
495
- videoMaterial[fieldName] = this._videoTexture;
496
- break;
497
- // doesn't render:
498
- // case "emissiveTexture":
499
- // console.log(this.videoMaterial);
500
- // // (this.videoMaterial as any).map = this.videoTexture;
501
- // (this.videoMaterial as any).emissive?.set(1,1,1);// = this.videoTexture;
502
- // (this.videoMaterial as any).emissiveMap = this.videoTexture;
503
- // break;
504
- }
505
- }
506
- }
507
- else {
508
- console.warn("Can not play video, no material found, this might be a multimaterial case which is not supported yet");
509
- return;
510
- }
511
- this.updateVideoElementSettings();
512
- this.updateVideoElementStyles();
513
- if (playAutomatically)
514
- this.play();
515
- };
516
- VideoPlayer.prototype.updateVideoElementSettings = function () {
517
- if (!this._videoElement)
518
- return;
519
- this._videoElement.loop = this._isLooping;
520
- this._videoElement.currentTime = this.currentTime;
521
- this._videoElement.playbackRate = this._playbackSpeed;
522
- // don't open in fullscreen on iOS
523
- this._videoElement.playsInline = true;
524
- var muted = !this._receivedInput || this.audioOutputMode === VideoAudioOutputMode.None;
525
- if (!muted && this._muted)
526
- muted = true;
527
- this._videoElement.muted = muted;
528
- if (this.playOnAwake)
529
- this._videoElement.autoplay = true;
530
- };
531
- VideoPlayer.prototype.updateVideoElementStyles = function () {
532
- if (!this._videoElement)
533
- return;
534
- // set style here so preview frame is rendered
535
- // set display and selectable because otherwise it interferes with input/focus, e.g. breaks orbit controls
536
- this._videoElement.style.userSelect = "none";
537
- this._videoElement.style.visibility = "hidden";
538
- this._videoElement.style.display = "none";
539
- this.updateAspect();
540
- };
541
- VideoPlayer.prototype.updateAspectImpl = function () {
542
- var id, lastAspect, stream, aspect, _i, _a, track, settings, i, _b, _c, obj, worldAspect, parentScale, i;
543
- return __generator(this, function (_d) {
544
- switch (_d.label) {
545
- case 0:
546
- id = ++this._updateAspectRoutineId;
547
- lastAspect = undefined;
548
- stream = this.clip;
549
- _d.label = 1;
550
- case 1:
551
- if (!(id === this._updateAspectRoutineId && this.aspectMode !== AspectMode.None && this.clip && stream === this.clip && this._isPlaying)) return [3 /*break*/, 13];
552
- if (!stream || typeof stream === "string") {
553
- return [2 /*return*/];
554
- }
555
- aspect = undefined;
556
- for (_i = 0, _a = stream.getVideoTracks(); _i < _a.length; _i++) {
557
- track = _a[_i];
558
- settings = track.getSettings();
559
- if (settings && settings.width && settings.height) {
560
- aspect = settings.width / settings.height;
561
- break;
562
- }
563
- // on Firefox capturing the canvas stream works, but it looks like
564
- // the canvas stream track doesn't contain settings?!
565
- else {
566
- aspect = this.context.renderer.domElement.clientWidth / this.context.renderer.domElement.clientHeight;
567
- }
568
- }
569
- if (!(aspect === undefined)) return [3 /*break*/, 6];
570
- i = 0;
571
- _d.label = 2;
572
- case 2:
573
- if (!(i < 10)) return [3 /*break*/, 5];
574
- return [4 /*yield*/];
575
- case 3:
576
- _d.sent();
577
- _d.label = 4;
578
- case 4:
579
- i++;
580
- return [3 /*break*/, 2];
581
- case 5:
582
- if (!this.isPlaying)
583
- return [3 /*break*/, 13];
584
- return [3 /*break*/, 1];
585
- case 6:
586
- if (!(lastAspect === aspect)) return [3 /*break*/, 8];
587
- return [4 /*yield*/];
588
- case 7:
589
- _d.sent();
590
- return [3 /*break*/, 1];
591
- case 8:
592
- for (_b = 0, _c = this._targetObjects; _b < _c.length; _b++) {
593
- obj = _c[_b];
594
- worldAspect = 1;
595
- if (obj.parent) {
596
- parentScale = engine_three_utils_js_1.getWorldScale(obj.parent);
597
- worldAspect = parentScale.x / parentScale.y;
598
- }
599
- switch (this.aspectMode) {
600
- case AspectMode.AdjustHeight:
601
- obj.scale.y = 1 / aspect * obj.scale.x * worldAspect;
602
- break;
603
- case AspectMode.AdjustWidth:
604
- obj.scale.x = aspect * obj.scale.y * worldAspect;
605
- break;
606
- }
607
- }
608
- i = 0;
609
- _d.label = 9;
610
- case 9:
611
- if (!(i < 3)) return [3 /*break*/, 12];
612
- return [4 /*yield*/];
613
- case 10:
614
- _d.sent();
615
- _d.label = 11;
616
- case 11:
617
- i++;
618
- return [3 /*break*/, 9];
619
- case 12: return [3 /*break*/, 1];
620
- case 13: return [2 /*return*/];
621
- }
622
- });
623
- };
624
- __decorate([
625
- engine_serialization_decorator_js_1.serializable()
626
- ], VideoPlayer.prototype, "playOnAwake");
627
- __decorate([
628
- engine_serialization_decorator_js_1.serializable()
629
- ], VideoPlayer.prototype, "aspectMode");
630
- __decorate([
631
- engine_serialization_decorator_js_1.serializable(URL)
632
- ], VideoPlayer.prototype, "clip");
633
- __decorate([
634
- engine_serialization_decorator_js_1.serializable()
635
- ], VideoPlayer.prototype, "renderMode");
636
- __decorate([
637
- engine_serialization_decorator_js_1.serializable()
638
- ], VideoPlayer.prototype, "targetMaterialProperty");
639
- __decorate([
640
- engine_serialization_decorator_js_1.serializable(Renderer_js_1.Renderer)
641
- ], VideoPlayer.prototype, "targetMaterialRenderer");
642
- __decorate([
643
- engine_serialization_decorator_js_1.serializable(three_1.Texture)
644
- ], VideoPlayer.prototype, "targetTexture");
645
- __decorate([
646
- engine_serialization_decorator_js_1.serializable()
647
- ], VideoPlayer.prototype, "time");
648
- __decorate([
649
- engine_serialization_decorator_js_1.serializable()
650
- ], VideoPlayer.prototype, "playbackSpeed");
651
- __decorate([
652
- engine_serialization_decorator_js_1.serializable()
653
- ], VideoPlayer.prototype, "isLooping");
654
- __decorate([
655
- engine_serialization_decorator_js_1.serializable()
656
- ], VideoPlayer.prototype, "audioOutputMode");
657
- return VideoPlayer;
658
- }(Component_js_1.Behaviour));
659
- exports.VideoPlayer = VideoPlayer;
660
- var VideoOverlay = /** @class */ (function () {
661
- function VideoOverlay(context) {
662
- this._videos = [];
663
- this._isInScreenspaceMode = false;
664
- this.context = context;
665
- this._input = new VideoOverlayInput(this);
666
- }
667
- Object.defineProperty(VideoOverlay.prototype, "enabled", {
668
- get: function () {
669
- return this._isInScreenspaceMode;
670
- },
671
- set: function (val) {
672
- if (val)
673
- this.start();
674
- else
675
- this.stop();
676
- },
677
- enumerable: false,
678
- configurable: true
679
- });
680
- VideoOverlay.prototype.add = function (video) {
681
- if (this._videos.indexOf(video) === -1) {
682
- this._videos.push(video);
683
- }
684
- };
685
- VideoOverlay.prototype.remove = function (video) {
686
- if (!video)
687
- return;
688
- var index = this._videos.indexOf(video);
689
- if (index >= 0) {
690
- this._videos.splice(index, 1);
691
- }
692
- };
693
- VideoOverlay.prototype.start = function () {
694
- var _a;
695
- if (this._isInScreenspaceMode)
696
- return;
697
- if (this._videos.length < 0)
698
- return;
699
- var texture = this._videos[this._videos.length - 1];
700
- if (!texture)
701
- return;
702
- this._isInScreenspaceMode = true;
703
- if (!this._screenspaceModeQuad) {
704
- this._screenspaceModeQuad = engine_create_objects_js_1.ObjectUtils.createPrimitive(engine_create_objects_js_1.PrimitiveType.Quad, {
705
- material: new ScreenspaceTexture(texture)
706
- });
707
- if (!this._screenspaceModeQuad)
708
- return;
709
- this._screenspaceModeQuad.geometry.scale(2, 2, 2);
710
- }
711
- var quad = this._screenspaceModeQuad;
712
- this.context.scene.add(quad);
713
- this.updateScreenspaceMaterialUniforms();
714
- var mat = quad.material;
715
- mat === null || mat === void 0 ? void 0 : mat.reset();
716
- (_a = this._input) === null || _a === void 0 ? void 0 : _a.enable(mat);
717
- };
718
- VideoOverlay.prototype.stop = function () {
719
- var _a;
720
- this._isInScreenspaceMode = false;
721
- if (this._screenspaceModeQuad) {
722
- (_a = this._input) === null || _a === void 0 ? void 0 : _a.disable();
723
- this._screenspaceModeQuad.removeFromParent();
724
- }
725
- };
726
- VideoOverlay.prototype.updateScreenspaceMaterialUniforms = function () {
727
- var _a;
728
- var mat = (_a = this._screenspaceModeQuad) === null || _a === void 0 ? void 0 : _a.material;
729
- if (!mat)
730
- return;
731
- // mat.videoAspect = this.videoTexture?.image?.width / this.videoTexture?.image?.height;
732
- mat.screenAspect = this.context.domElement.clientWidth / this.context.domElement.clientHeight;
733
- };
734
- return VideoOverlay;
735
- }());
736
- var VideoOverlayInput = /** @class */ (function () {
737
- function VideoOverlayInput(overlay) {
738
- this._isPinching = false;
739
- this._lastPinch = 0;
740
- this.overlay = overlay;
741
- this.context = overlay.context;
742
- }
743
- VideoOverlayInput.prototype.enable = function (mat) {
744
- var _this = this;
745
- this._material = mat;
746
- window.addEventListener("resize", this._onResizeScreenFn = function () {
747
- _this.overlay.updateScreenspaceMaterialUniforms();
748
- });
749
- window.addEventListener("keyup", this._onKeyUpFn = function (args) {
750
- if (args.key === "Escape")
751
- _this.overlay.stop();
752
- });
753
- window.addEventListener("wheel", this._onMouseWheelFn = function (args) {
754
- if (_this.overlay.enabled) {
755
- mat.zoom += args.deltaY * .0005;
756
- args.preventDefault();
757
- }
758
- }, { passive: false });
759
- var delta = new three_1.Vector2();
760
- window.addEventListener("mousemove", function (args) {
761
- if (_this.overlay.enabled && _this.context.input.getPointerPressed(0)) {
762
- var normalizedMovement = new three_1.Vector2(args.movementX, args.movementY);
763
- normalizedMovement.x /= _this.context.domElement.clientWidth;
764
- normalizedMovement.y /= _this.context.domElement.clientHeight;
765
- delta.set(normalizedMovement.x, normalizedMovement.y);
766
- delta.multiplyScalar(mat.zoom / -_this.context.time.deltaTime * .01);
767
- mat.offset = mat.offset.add(delta);
768
- }
769
- });
770
- window.addEventListener("pointermove", function (args) {
771
- if (_this.overlay.enabled && _this.context.input.getPointerPressed(0)) {
772
- var count = _this.context.input.getTouchesPressedCount();
773
- if (count === 1) {
774
- delta.set(args.movementX, args.movementY);
775
- delta.multiplyScalar(mat.zoom * -_this.context.time.deltaTime * .05);
776
- mat.offset = mat.offset.add(delta);
777
- }
778
- }
779
- });
780
- var lastTouchStartTime = 0;
781
- window.addEventListener("touchstart", function (e) {
782
- if (e.touches.length < 2) {
783
- if (_this.context.time.time - lastTouchStartTime < .3) {
784
- _this.overlay.stop();
785
- }
786
- lastTouchStartTime = _this.context.time.time;
787
- return;
788
- }
789
- _this._isPinching = true;
790
- _this._lastPinch = 0;
791
- });
792
- window.addEventListener("touchmove", function (e) {
793
- if (!_this._isPinching || !_this._material)
794
- return;
795
- var touch1 = e.touches[0];
796
- var touch2 = e.touches[1];
797
- var dx = touch1.clientX - touch2.clientX;
798
- var dy = touch1.clientY - touch2.clientY;
799
- var distance = Math.sqrt(dx * dx + dy * dy);
800
- if (_this._lastPinch !== 0) {
801
- var delta_1 = distance - _this._lastPinch;
802
- _this._material.zoom -= delta_1 * .004;
803
- }
804
- _this._lastPinch = distance;
805
- });
806
- window.addEventListener("touchend", function () {
807
- _this._isPinching = false;
808
- });
809
- };
810
- VideoOverlayInput.prototype.disable = function () {
811
- if (this._onResizeScreenFn) {
812
- window.removeEventListener("resize", this._onResizeScreenFn);
813
- this._onResizeScreenFn = undefined;
814
- }
815
- if (this._onKeyUpFn) {
816
- window.removeEventListener("keyup", this._onKeyUpFn);
817
- this._onKeyUpFn = undefined;
818
- }
819
- if (this._onMouseWheelFn) {
820
- window.removeEventListener("wheel", this._onMouseWheelFn);
821
- this._onMouseWheelFn = undefined;
822
- }
823
- };
824
- return VideoOverlayInput;
825
- }());
826
- var ScreenspaceTexture = /** @class */ (function (_super) {
827
- __extends(ScreenspaceTexture, _super);
828
- // maxZoom : number = 10
829
- function ScreenspaceTexture(tex) {
830
- var _this = _super.call(this) || this;
831
- _this._offset = new three_1.Vector2();
832
- _this.uniforms = {
833
- map: { value: tex },
834
- screenAspect: { value: 1 },
835
- offsetScale: { value: new three_1.Vector4(0, 0, 1, 1) }
836
- };
837
- _this.vertexShader = "\n uniform sampler2D map;\n uniform float screenAspect;\n uniform vec4 offsetScale;\n varying vec2 vUv;\n\n void main() {\n\n gl_Position = vec4( position , 1.0 );\n vUv = uv;\n vUv.y = 1. - vUv.y;\n\n // fit into screen\n ivec2 res = textureSize(map, 0);\n float videoAspect = float(res.x) / float(res.y);\n float aspect = videoAspect / screenAspect;\n if(aspect >= 1.0) \n {\n vUv.y = vUv.y * aspect;\n float offset = (1. - aspect) * .5;\n vUv.y = vUv.y + offset;\n }\n else\n {\n vUv.x = vUv.x / aspect;\n float offset = (1. - 1. / aspect) * .5;\n vUv.x = vUv.x + offset;\n }\n\n vUv.x -= .5;\n vUv.y -= .5;\n\n vUv.x *= offsetScale.z;\n vUv.y *= offsetScale.z;\n vUv.x += offsetScale.x;\n vUv.y += offsetScale.y;\n\n vUv.x += .5;\n vUv.y += .5;\n }\n\n ";
838
- _this.fragmentShader = "\n uniform sampler2D map;\n varying vec2 vUv;\n void main() {\n if(vUv.x < 0. || vUv.x > 1. || vUv.y < 0. || vUv.y > 1.)\n gl_FragColor = vec4(0., 0., 0., 1.);\n else\n {\n vec4 texcolor = texture2D(map, vUv);\n texcolor = LinearTosRGB(texcolor);\n gl_FragColor = texcolor;\n }\n }\n ";
839
- return _this;
840
- }
841
- Object.defineProperty(ScreenspaceTexture.prototype, "screenAspect", {
842
- set: function (val) {
843
- this.uniforms["screenAspect"].value = val;
844
- this.needsUpdate = true;
845
- },
846
- enumerable: false,
847
- configurable: true
848
- });
849
- Object.defineProperty(ScreenspaceTexture.prototype, "offset", {
850
- get: function () {
851
- var val = this.uniforms["offsetScale"].value;
852
- this._offset.set(val.x, val.y);
853
- return this._offset;
854
- },
855
- set: function (vec) {
856
- var val = this.uniforms["offsetScale"].value;
857
- val.x = vec.x;
858
- val.y = vec.y;
859
- // console.log(val);
860
- this.uniforms["offsetScale"].value = val;
861
- this.needsUpdate = true;
862
- },
863
- enumerable: false,
864
- configurable: true
865
- });
866
- Object.defineProperty(ScreenspaceTexture.prototype, "zoom", {
867
- get: function () {
868
- return this.uniforms["offsetScale"].value.z; // * this.maxZoom;
869
- },
870
- set: function (val) {
871
- var zoom = this.uniforms["offsetScale"].value;
872
- if (val < .001)
873
- val = .001;
874
- zoom.z = val;
875
- // zoom.z = this.maxZoom - val;
876
- // zoom.z /= this.maxZoom;
877
- this.needsUpdate = true;
878
- },
879
- enumerable: false,
880
- configurable: true
881
- });
882
- ScreenspaceTexture.prototype.reset = function () {
883
- this.offset = this.offset.set(0, 0);
884
- this.zoom = 1;
885
- this.needsUpdate = true;
886
- };
887
- return ScreenspaceTexture;
888
- }(three_1.ShaderMaterial));
src/engine-components/dist/Voip2.js DELETED
@@ -1,46 +0,0 @@
1
- "use strict";
2
- var __extends = (this && this.__extends) || (function () {
3
- var extendStatics = function (d, b) {
4
- extendStatics = Object.setPrototypeOf ||
5
- ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
6
- function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
7
- return extendStatics(d, b);
8
- };
9
- return function (d, b) {
10
- extendStatics(d, b);
11
- function __() { this.constructor = d; }
12
- d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
13
- };
14
- })();
15
- var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
16
- var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
17
- if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
18
- else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
19
- return c > 3 && r && Object.defineProperty(target, key, r), r;
20
- };
21
- exports.__esModule = true;
22
- exports.Voip2 = void 0;
23
- var Component_js_1 = require("./Component.js");
24
- var engine_networking_streams_js_1 = require("../engine/engine_networking_streams.js");
25
- var Voip2 = /** @class */ (function (_super) {
26
- __extends(Voip2, _super);
27
- function Voip2() {
28
- var _this = _super !== null && _super.apply(this, arguments) || this;
29
- _this.connectOnEnable = false;
30
- return _this;
31
- }
32
- Voip2.prototype.onEnable = function () {
33
- if (!this._net)
34
- this._net = engine_networking_streams_js_1.NetworkedStreams.create(this);
35
- this._net.enable();
36
- };
37
- Voip2.prototype.onDisable = function () {
38
- var _a;
39
- (_a = this._net) === null || _a === void 0 ? void 0 : _a.disable();
40
- };
41
- __decorate([
42
- serializable()
43
- ], Voip2.prototype, "connectOnEnable");
44
- return Voip2;
45
- }(Component_js_1.Behaviour));
46
- exports.Voip2 = Voip2;
src/engine-schemes/dist/vr-user-state-buffer.js DELETED
@@ -1,110 +0,0 @@
1
- "use strict";
2
- // automatically generated by the FlatBuffers compiler, do not modify
3
- exports.__esModule = true;
4
- exports.VrUserStateBuffer = void 0;
5
- var flatbuffers = require("flatbuffers");
6
- var vec3_js_1 = require("./vec3.js");
7
- var vec4_js_1 = require("./vec4.js");
8
- var VrUserStateBuffer = /** @class */ (function () {
9
- function VrUserStateBuffer() {
10
- this.bb = null;
11
- this.bb_pos = 0;
12
- }
13
- VrUserStateBuffer.prototype.__init = function (i, bb) {
14
- this.bb_pos = i;
15
- this.bb = bb;
16
- return this;
17
- };
18
- VrUserStateBuffer.getRootAsVrUserStateBuffer = function (bb, obj) {
19
- return (obj || new VrUserStateBuffer()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
20
- };
21
- VrUserStateBuffer.getSizePrefixedRootAsVrUserStateBuffer = function (bb, obj) {
22
- bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
23
- return (obj || new VrUserStateBuffer()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
24
- };
25
- VrUserStateBuffer.prototype.guid = function (optionalEncoding) {
26
- var offset = this.bb.__offset(this.bb_pos, 4);
27
- return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
28
- };
29
- VrUserStateBuffer.prototype.time = function () {
30
- var offset = this.bb.__offset(this.bb_pos, 6);
31
- return offset ? this.bb.readInt64(this.bb_pos + offset) : this.bb.createLong(0, 0);
32
- };
33
- VrUserStateBuffer.prototype.avatarId = function (optionalEncoding) {
34
- var offset = this.bb.__offset(this.bb_pos, 8);
35
- return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
36
- };
37
- VrUserStateBuffer.prototype.position = function (obj) {
38
- var offset = this.bb.__offset(this.bb_pos, 10);
39
- return offset ? (obj || new vec3_js_1.Vec3()).__init(this.bb_pos + offset, this.bb) : null;
40
- };
41
- VrUserStateBuffer.prototype.rotation = function (obj) {
42
- var offset = this.bb.__offset(this.bb_pos, 12);
43
- return offset ? (obj || new vec4_js_1.Vec4()).__init(this.bb_pos + offset, this.bb) : null;
44
- };
45
- VrUserStateBuffer.prototype.scale = function () {
46
- var offset = this.bb.__offset(this.bb_pos, 14);
47
- return offset ? this.bb.readFloat32(this.bb_pos + offset) : 0.0;
48
- };
49
- VrUserStateBuffer.prototype.posLeftHand = function (obj) {
50
- var offset = this.bb.__offset(this.bb_pos, 16);
51
- return offset ? (obj || new vec3_js_1.Vec3()).__init(this.bb_pos + offset, this.bb) : null;
52
- };
53
- VrUserStateBuffer.prototype.posRightHand = function (obj) {
54
- var offset = this.bb.__offset(this.bb_pos, 18);
55
- return offset ? (obj || new vec3_js_1.Vec3()).__init(this.bb_pos + offset, this.bb) : null;
56
- };
57
- VrUserStateBuffer.prototype.rotLeftHand = function (obj) {
58
- var offset = this.bb.__offset(this.bb_pos, 20);
59
- return offset ? (obj || new vec4_js_1.Vec4()).__init(this.bb_pos + offset, this.bb) : null;
60
- };
61
- VrUserStateBuffer.prototype.rotRightHand = function (obj) {
62
- var offset = this.bb.__offset(this.bb_pos, 22);
63
- return offset ? (obj || new vec4_js_1.Vec4()).__init(this.bb_pos + offset, this.bb) : null;
64
- };
65
- VrUserStateBuffer.startVrUserStateBuffer = function (builder) {
66
- builder.startObject(10);
67
- };
68
- VrUserStateBuffer.addGuid = function (builder, guidOffset) {
69
- builder.addFieldOffset(0, guidOffset, 0);
70
- };
71
- VrUserStateBuffer.addTime = function (builder, time) {
72
- builder.addFieldInt64(1, time, builder.createLong(0, 0));
73
- };
74
- VrUserStateBuffer.addAvatarId = function (builder, avatarIdOffset) {
75
- builder.addFieldOffset(2, avatarIdOffset, 0);
76
- };
77
- VrUserStateBuffer.addPosition = function (builder, positionOffset) {
78
- builder.addFieldStruct(3, positionOffset, 0);
79
- };
80
- VrUserStateBuffer.addRotation = function (builder, rotationOffset) {
81
- builder.addFieldStruct(4, rotationOffset, 0);
82
- };
83
- VrUserStateBuffer.addScale = function (builder, scale) {
84
- builder.addFieldFloat32(5, scale, 0.0);
85
- };
86
- VrUserStateBuffer.addPosLeftHand = function (builder, posLeftHandOffset) {
87
- builder.addFieldStruct(6, posLeftHandOffset, 0);
88
- };
89
- VrUserStateBuffer.addPosRightHand = function (builder, posRightHandOffset) {
90
- builder.addFieldStruct(7, posRightHandOffset, 0);
91
- };
92
- VrUserStateBuffer.addRotLeftHand = function (builder, rotLeftHandOffset) {
93
- builder.addFieldStruct(8, rotLeftHandOffset, 0);
94
- };
95
- VrUserStateBuffer.addRotRightHand = function (builder, rotRightHandOffset) {
96
- builder.addFieldStruct(9, rotRightHandOffset, 0);
97
- };
98
- VrUserStateBuffer.endVrUserStateBuffer = function (builder) {
99
- var offset = builder.endObject();
100
- return offset;
101
- };
102
- VrUserStateBuffer.finishVrUserStateBufferBuffer = function (builder, offset) {
103
- builder.finish(offset);
104
- };
105
- VrUserStateBuffer.finishSizePrefixedVrUserStateBufferBuffer = function (builder, offset) {
106
- builder.finish(offset, undefined, true);
107
- };
108
- return VrUserStateBuffer;
109
- }());
110
- exports.VrUserStateBuffer = VrUserStateBuffer;
src/engine-schemes/dist/api.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: aeb5cd8dfc66d8647a9ae66a8c05b289
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine/dist/api.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: f0e01709ea200b648a0c053c2c79f225
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-components/dist/AudioSource.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: ed9ae84735d6def4b8122977a98ffa67
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine/dist/engine_networking_streams.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: 48d3950534f4dae478088f839e8c1578
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/schemes.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: 5f962500714f41d45b62c935327da875
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-components/dist/ScreenCapture.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: 426cba1d1719d9240aba902b59b93adb
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/synced-camera-model.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: 1d631bded37bcc249bc1c0f40c3c67be
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/synced-transform-model.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: e8764b784e44b1346ad7119078221aa2
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/transform.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: eab38a2bd1b4834448fd9acc5fba06d0
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/vec2.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: a1b516d1fe960a441be75734a9d63918
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/vec3.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: 8bb79deb329f82d45b65efdbcfcfa590
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/vec4.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: 77f3ae2882ffe74409bea2f9443fb40d
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-components/dist/VideoPlayer.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: f2624a361e4f4e046998075d7c419627
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-components/dist/Voip2.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: 92b7a6f0a8029fd4799f91553f3a9a58
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine-schemes/dist/vr-user-state-buffer.js.meta DELETED
@@ -1,7 +0,0 @@
1
- fileFormatVersion: 2
2
- guid: b82b7d00193cb1d4f851ab843c5bb99f
3
- DefaultImporter:
4
- externalObjects: {}
5
- userData:
6
- assetBundleName:
7
- assetBundleVariant:
src/engine/engine_shims.ts CHANGED
@@ -4,8 +4,6 @@
4
4
  convertToBlob: (options?: any) => Promise<Blob>;
5
5
  }
6
6
 
7
- console.log("APPLYING ENGINE SHIMS");
8
-
9
7
  // REMOVE once usage of browsers that don't support OffscreenCanvas is low
10
8
  // Shim for missing OffscreenCanvas in iOS 16.x
11
9
  if (typeof globalThis !== undefined && !('OffscreenCanvas' in globalThis)) {
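The hunk above only removes the startup console.log; the OffscreenCanvas shim shown as context is unchanged. For orientation, a minimal sketch of a canvas-backed OffscreenCanvas fallback of this kind, assuming nothing beyond the convertToBlob member declared at the top of engine_shims.ts (this is not the actual shim, which is truncated in this diff):

// Sketch only: a DOM-canvas-backed OffscreenCanvas fallback for browsers that
// lack the API (e.g. iOS 16.x). The real implementation in engine_shims.ts may differ.
if (typeof globalThis !== 'undefined' && !('OffscreenCanvas' in globalThis)) {
    class OffscreenCanvasShim {
        readonly canvas: HTMLCanvasElement;
        constructor(width: number, height: number) {
            this.canvas = document.createElement('canvas');
            this.canvas.width = width;
            this.canvas.height = height;
        }
        getContext(type: string, options?: any) {
            return this.canvas.getContext(type as any, options);
        }
        // Mirrors the convertToBlob(options?) => Promise<Blob> member declared above.
        convertToBlob(options?: any): Promise<Blob> {
            return new Promise<Blob>((resolve, reject) => {
                this.canvas.toBlob(
                    blob => (blob ? resolve(blob) : reject(new Error('toBlob failed'))),
                    options?.type,
                    options?.quality);
            });
        }
    }
    (globalThis as any).OffscreenCanvas = OffscreenCanvasShim;
}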
src/engine/codegen/register_types.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { TypeStore } from "./../engine_typestore.js"
2
-
2
+
3
3
  // Import types
4
4
  import { __Ignore } from "../../engine-components/codegen/components.js";
5
5
  import { ActionBuilder } from "../../engine-components/export/usdz/extensions/behavior/BehavioursBuilder.js";
@@ -219,7 +219,7 @@
219
219
  import { XRGrabRendering } from "../../engine-components/webxr/WebXRGrabRendering.js";
220
220
  import { XRRig } from "../../engine-components/webxr/WebXRRig.js";
221
221
  import { XRState } from "../../engine-components/XRFlag.js";
222
-
222
+
223
223
  // Register types
224
224
  TypeStore.add("__Ignore", __Ignore);
225
225
  TypeStore.add("ActionBuilder", ActionBuilder);
src/engine-components/Renderer.ts CHANGED
@@ -1002,6 +1002,19 @@
1002
1002
  this.inst.layers.set(2);
1003
1003
  this.inst.visible = true;
1004
1004
 
1005
+ // Not handled by RawShaderMaterial, so we need to set the define explicitly.
1006
+ // Edge case: theoretically some users of the material could use it in an
1007
+ // instanced fashion, and some others not. In that case, the material would not
1008
+ // be able to be shared between the two use cases. We could probably add an
1009
+ // onBeforeRender call for the InstancedMesh and set the define there.
1010
+ // Same would apply if we support skinning -
1011
+ // there we would have to split instanced batches so that the ones using skinning
1012
+ // are all in the same batch.
1013
+ if (material instanceof THREE.RawShaderMaterial) {
1014
+ material.defines["USE_INSTANCING"] = true;
1015
+ material.needsUpdate = true;
1016
+ }
1017
+
1005
1018
  // this.inst.castShadow = true;
1006
1019
  // this.inst.receiveShadow = true;
1007
1020
  this.context.scene.add(this.inst);
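The comment added above floats an onBeforeRender hook as a way to keep sharing a RawShaderMaterial between instanced and non-instanced meshes. A rough sketch of that idea, purely illustrative and not part of this change (the function name is made up; inst corresponds to this.inst from the hunk, and the same THREE namespace import as in Renderer.ts is assumed):

import * as THREE from 'three';

// Illustrative only: set USE_INSTANCING right before each draw instead of once at
// creation time, so a material shared with non-instanced meshes stays usable.
function enableInstancingPerDraw(inst: THREE.InstancedMesh) {
    inst.onBeforeRender = (_renderer, _scene, _camera, _geometry, material) => {
        if (material instanceof THREE.RawShaderMaterial && material.defines["USE_INSTANCING"] !== true) {
            material.defines["USE_INSTANCING"] = true;
            material.needsUpdate = true;
        }
    };
}

A non-instanced mesh sharing the material would need the opposite toggle in its own onBeforeRender, and flipping a define per draw forces a shader recompile, so cloning the material per use case is usually the cheaper option in practice.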
src/engine-components/export/usdz/ThreeUSDZExporter.ts CHANGED
@@ -26,7 +26,8 @@
26
26
  SRGBColorSpace,
27
27
  AnimationClip,
28
28
  OrthographicCamera,
29
- BufferAttribute
29
+ BufferAttribute,
30
+ Vector4
30
31
  } from 'three';
31
32
  import * as fflate from 'three/examples/jsm/libs/fflate.module.js';
32
33
 
@@ -428,13 +429,15 @@
428
429
 
429
430
  }
430
431
 
432
+ declare type TextureMap = {[name: string]: {texture: Texture, scale?: Vector4}};
433
+
431
434
  class USDZExporterContext {
432
435
  root: any;
433
436
  exporter: any;
434
437
  extensions: any;
435
- materials: {};
436
- textures: {};
437
- files: {};
438
+ materials: Map<string, Material>;
439
+ textures: TextureMap;
440
+ files: { [path: string]: Uint8Array | [Uint8Array, fflate.ZipOptions] | null | any }
438
441
  document: USDDocument;
439
442
  output: string;
440
443
  animations: AnimationClip[];
@@ -447,7 +450,7 @@
447
450
  if ( extensions )
448
451
  this.extensions = extensions;
449
452
 
450
- this.materials = {};
453
+ this.materials = new Map();
451
454
  this.textures = {};
452
455
  this.files = {};
453
456
  this.document = new USDDocument();
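The exporter context now keys materials by name in a Map and stores each texture as a TextureMap entry that can carry an optional per-channel multiplier. A small sketch of the new entry shape (TextureMap is the type declared in this hunk; the id and texture values below are made up):

import { Texture, Vector4 } from 'three';

// Sketch: each TextureMap entry pairs the texture with an optional RGBA multiplier
// that the exporter bakes into the written image (see the imageToCanvas change below).
const textures: TextureMap = {};
textures['someTexture_0.5'] = {
    texture: new Texture(),
    scale: new Vector4(1, 1, 1, 0.5) // leave undefined when nothing needs to be baked
};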
@@ -578,18 +581,19 @@
578
581
 
579
582
  for ( const id in textures ) {
580
583
 
581
- let texture = textures[ id ];
584
+ const textureData = textures[ id ];
585
+ let texture = textureData.texture;
582
586
 
583
587
  const isRGBA = formatsWithAlphaChannel.includes( texture.format );
584
588
 
589
+ //@ts-ignore
585
590
  if ( texture.isCompressedTexture || texture.isRenderTargetTexture ) {
586
591
 
587
592
  texture = decompressGpuTexture( texture, options.maxTextureSize, decompressionRenderer );
588
593
 
589
594
  }
590
595
 
591
- // TODO add readback options for textures that don't have texture.image
592
- const canvas = await imageToCanvas( texture.image ).catch( err => {
596
+ const canvas = await imageToCanvas( texture.image, textureData.scale, false, options.maxTextureSize ).catch( err => {
593
597
  console.error("Error converting texture to canvas", texture, err);
594
598
  });
595
599
 
@@ -869,6 +873,7 @@
869
873
  #else
870
874
  gl_FragColor = texture2D( blitTexture, vUv);
871
875
  #endif
876
+ gl_FragColor.rgb *= gl_FragColor.a;
872
877
  }`
873
878
  } );
874
879
 
@@ -926,7 +931,7 @@
926
931
 
927
932
  }
928
933
 
929
- async function imageToCanvas( image, color: string | undefined = undefined, flipY = false, maxTextureSize = 4096 ) {
934
+ async function imageToCanvas( image, color: Vector4 | undefined = undefined, flipY = false, maxTextureSize = 4096 ) {
930
935
 
931
936
  if ( isImageBitmap( image ) ) {
932
937
 
@@ -947,15 +952,14 @@
947
952
 
948
953
  context.drawImage( image, 0, 0, canvas.width, canvas.height );
949
954
 
950
- // TODO remove, not used anymore
955
+ // Currently only used to apply opacity scale since QuickLook and usdview don't support that yet
951
956
  if ( color !== undefined ) {
952
957
 
953
- const hex = parseInt( color, 16 );
958
+ const r = color.x;
959
+ const g = color.y;
960
+ const b = color.z;
961
+ const a = color.w;
954
962
 
955
- const r = ( hex >> 16 & 255 ) / 255;
956
- const g = ( hex >> 8 & 255 ) / 255;
957
- const b = ( hex & 255 ) / 255;
958
-
959
963
  const imagedata = context.getImageData( 0, 0, canvas.width, canvas.height );
960
964
  const data = imagedata.data;
961
965
 
@@ -964,6 +968,7 @@
964
968
  data[ i + 0 ] = data[ i + 0 ] * r;
965
969
  data[ i + 1 ] = data[ i + 1 ] * g;
966
970
  data[ i + 2 ] = data[ i + 2 ] * b;
971
+ data[ i + 3 ] = data[ i + 3 ] * a;
967
972
 
968
973
  }
969
974
 
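With this change imageToCanvas takes a Vector4 multiplier instead of a hex color string and scales all four channels, so an opacity factor can be baked directly into the exported bitmap. A self-contained sketch of the same per-pixel step (the function name and the 2D-context parameter are assumptions; the loop mirrors the hunk above):

import { Vector4 } from 'three';

// Multiplies every pixel of an already-drawn canvas by an RGBA scale.
// Uint8ClampedArray clamps the results back into the 0..255 range automatically.
function applyChannelScale(ctx: CanvasRenderingContext2D, width: number, height: number, scale: Vector4) {
    const imagedata = ctx.getImageData(0, 0, width, height);
    const data = imagedata.data;
    for (let i = 0; i < data.length; i += 4) {
        data[i + 0] *= scale.x; // r
        data[i + 1] *= scale.y; // g
        data[i + 2] *= scale.z; // b
        data[i + 3] *= scale.w; // a, newly scaled here so opacity can be baked in
    }
    ctx.putImageData(imagedata, 0, 0);
}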
@@ -1371,7 +1376,7 @@
1371
1376
 
1372
1377
  // Materials
1373
1378
 
1374
- function buildMaterials( materials, textures, quickLookCompatible = false ) {
1379
+ function buildMaterials( materials: Map<string, Material>, textures: TextureMap, quickLookCompatible = false ) {
1375
1380
 
1376
1381
  const array: Array<string> = [];
1377
1382
 
@@ -1392,7 +1397,7 @@
1392
1397
 
1393
1398
  }
1394
1399
 
1395
- function buildMaterial( material: MeshBasicMaterial, textures, quickLookCompatible = false ) {
1400
+ function buildMaterial( material: MeshBasicMaterial, textures: TextureMap, quickLookCompatible = false ) {
1396
1401
 
1397
1402
  // https://graphics.pixar.com/usd/docs/UsdPreviewSurface-Proposal.html
1398
1403
 
@@ -1404,7 +1409,11 @@
1404
1409
 
1405
1410
  const id = texture.id + ( color ? '_' + color.getHexString() : '' ) + ( opacity !== undefined ? '_' + opacity : '' );
1406
1411
 
1407
- textures[ id ] = texture;
1412
+ // It seems neither QuickLook nor usdview supports scale/bias on .a values, so we need to bake opacity multipliers into
1413
+ // the texture. This is not ideal.
1414
+ const opacityIsAppliedToTextureAndNotAsScale = quickLookCompatible && opacity !== undefined;
1415
+ const scaleToApply = opacityIsAppliedToTextureAndNotAsScale ? new Vector4(1, 1, 1, opacity) : undefined;
1416
+ textures[ id ] = { texture, scale: scaleToApply };
1408
1417
 
1409
1418
  const uv = texture.channel > 0 ? 'st' + texture.channel : 'st';
1410
1419
 
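The hunk above decides where an opacity multiplier ends up: in QuickLook-compatible exports it is baked into the texture pixels via the TextureMap scale, because QuickLook and usdview ignore a scale on the alpha channel; otherwise it is still written as the alpha component of float4 inputs:scale further down. A condensed, standalone restatement of that branch (the function name and signature are assumptions, not exporter API):

import { Texture, Vector4 } from 'three';

// Where does the opacity factor go: into the image pixels, or into inputs:scale?
function placeOpacity(texture: Texture, opacity: number | undefined, quickLookCompatible: boolean) {
    // Baked path: QuickLook/usdview ignore a scale on the alpha channel, so the
    // multiplier has to be applied to the pixels during export.
    const scale = (quickLookCompatible && opacity !== undefined)
        ? new Vector4(1, 1, 1, opacity)
        : undefined;
    // USD path: only emit opacity through inputs:scale when it was not baked,
    // so the factor is never applied twice.
    const scaleAlpha = (opacity !== undefined && scale === undefined) ? opacity : 1;
    return { entry: { texture, scale }, scaleAlpha };
}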
@@ -1481,7 +1490,7 @@
1481
1490
  token inputs:sourceColorSpace = "${ texture.colorSpace === 'srgb' ? 'sRGB' : 'raw' }"
1482
1491
  float2 inputs:st.connect = ${needsTextureTransform ? textureTransformOutput : textureTransformInput}
1483
1492
  ${needsTextureScale ? `
1484
- float4 inputs:scale = (${color ? color.r + ', ' + color.g + ', ' + color.b : '1, 1, 1'}, ${opacity !== undefined ? opacity : '1'})
1493
+ float4 inputs:scale = (${color ? color.r + ', ' + color.g + ', ' + color.b : '1, 1, 1'}, ${(opacity !== undefined && !opacityIsAppliedToTextureAndNotAsScale) ? opacity : '1'})
1485
1494
  ` : `` }
1486
1495
  ${needsNormalScaleAndBias ? `
1487
1496
  float4 inputs:scale = (${normalScaleValueString}, ${normalScaleValueString}, ${normalScaleValueString}, 1)
@@ -1499,7 +1508,7 @@
1499
1508
  }
1500
1509
 
1501
1510
  let effectiveOpacity = ( material.transparent || material.alphaTest ) ? material.opacity : 1;
1502
-
1511
+
1503
1512
  if ( material instanceof MeshPhysicalMaterial && material.transmission !== undefined) {
1504
1513
 
1505
1514
  // TODO does not help when a roughnessMap is used
@@ -1549,7 +1558,7 @@
1549
1558
  inputs.push( `${pad}float inputs:opacity.connect = ${materialRoot}/Material_${material.id}/Texture_${material.alphaMap.id}_opacity.outputs:r>` );
1550
1559
  inputs.push( `${pad}float inputs:opacityThreshold = 0.0001` );
1551
1560
 
1552
- samplers.push( buildTexture( material.alphaMap, 'opacity' ) );
1561
+ samplers.push( buildTexture( material.alphaMap, 'opacity', new Color( 1, 1, 1 ), effectiveOpacity ) );
1553
1562
 
1554
1563
  } else {
1555
1564
 
@@ -1568,12 +1577,15 @@
1568
1577
  if ( material.emissiveMap ) {
1569
1578
 
1570
1579
  inputs.push( `${pad}color3f inputs:emissiveColor.connect = ${materialRoot}/Material_${material.id}/Texture_${material.emissiveMap.id}_emissive.outputs:rgb>` );
1580
+ const color = material.emissive.clone();
1581
+ color.multiplyScalar( material.emissiveIntensity );
1582
+ samplers.push( buildTexture( material.emissiveMap, 'emissive', color ) );
1571
1583
 
1572
- samplers.push( buildTexture( material.emissiveMap, 'emissive' ) );
1573
-
1574
1584
  } else if ( material.emissive?.getHex() > 0 ) {
1575
1585
 
1576
- inputs.push( `${pad}color3f inputs:emissiveColor = ${buildColor( material.emissive )}` );
1586
+ const color = material.emissive.clone();
1587
+ color.multiplyScalar( material.emissiveIntensity );
1588
+ inputs.push( `${pad}color3f inputs:emissiveColor = ${buildColor( color )}` );
1577
1589
 
1578
1590
  } else {