Needle Engine

Changes between versions 3.32.5-alpha and 3.32.6-alpha
Files changed (6)
  1. src/engine/engine_addressables.ts +17 -19
  2. src/engine/engine_gameobject.ts +24 -2
  3. src/engine/engine_networking_instantiate.ts +3 -3
  4. src/engine-components/ui/EventSystem.ts +16 -3
  5. src/engine-components/timeline/PlayableDirector.ts +1 -1
  6. src/engine-components/timeline/TimelineTracks.ts +51 -14
src/engine/engine_addressables.ts CHANGED
@@ -1,5 +1,5 @@
- import { getParam, resolveUrl } from "../engine/engine_utils.js";
- import { SerializationContext, TypeSerializer } from "./engine_serialization_core.js";
+ import { deepClone, getParam, resolveUrl } from "../engine/engine_utils.js";
+ import { SerializationContext, TypeSerializer, assign } from "./engine_serialization_core.js";
  import { Context } from "./engine_setup.js";
  import { Group, Object3D, Texture, TextureLoader } from "three";
  import { processNewScripts } from "./engine_mainloop_utils.js";
@@ -272,32 +272,30 @@
  }
  }

- private async onInstantiate(parent?: Object3D | InstantiateOptions, networked: boolean = false, saveOnServer?: boolean) {
+ private async onInstantiate(opts?: Object3D | IInstantiateOptions, networked: boolean = false, saveOnServer?: boolean) {
  const context = Context.Current;
+
+ // clone the instantiate options immediately
+ // in case the user is not awaiting this call and already modifying the options
+ const options = new InstantiateOptions();
+ if (opts instanceof Object3D) {
+ options.parent = opts;
+ }
+ else if (opts) {
+ options.cloneAssign(opts);
+ }
+ if (!options.parent) options.parent = context.scene;
+
+ // ensure the asset is loaded
  if (this.mustLoad) {
  await this.loadAssetAsync();
  }
  if (debug)
- console.log("Instantiate", this.uri, "parent:", parent);
+ console.log("Instantiate", this.uri, "parent:", opts);

  if (this.asset) {
  if (debug) console.log("Add to scene", this.asset);

- let options = parent instanceof InstantiateOptions ? parent : null;
- if (!options) {
- options = new InstantiateOptions();
- }
-
- if (typeof parent === "object") {
- if (parent instanceof Object3D) {
- options.parent = parent;
- }
- else {
- Object.assign(options, parent);
- }
- }
- if (!options.parent) options.parent = context.scene;
-
  let count = AssetReference.currentlyInstantiating.get(this.uri);
  // allow up to 10000 instantiations of the same prefab in the same frame
  if (count !== undefined && count >= 10000) {
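
The rewritten `onInstantiate` now snapshots the instantiate options synchronously, before the first `await`, so a caller that does not await the returned promise can keep modifying its own options object without affecting an instantiation that is already in flight. A minimal standalone sketch of that clone-before-await pattern (hypothetical names, not the engine API):

```ts
// Clone-before-await: snapshot caller-supplied options before any await (illustrative only).
interface SpawnOptions { visible?: boolean; name?: string; }

async function spawn(opts?: SpawnOptions): Promise<SpawnOptions> {
    // copy the options synchronously, before the first await
    const options: SpawnOptions = { ...opts };
    // simulate the asset load that onInstantiate awaits
    await new Promise(resolve => setTimeout(resolve, 100));
    return options; // reflects the values at call time, not at resolve time
}

const shared: SpawnOptions = { visible: true };
const pending = spawn(shared); // caller does not await...
shared.visible = false;        // ...and keeps mutating its options object
pending.then(o => console.log(o.visible)); // logs true: the in-flight call is unaffected
```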
src/engine/engine_gameobject.ts CHANGED
@@ -51,6 +51,27 @@
  scale?: Vector3 | undefined;
  visible?: boolean | undefined;
  context?: Context | undefined;
+
+ clone(){
+ const clone = new InstantiateOptions();
+ clone.idProvider = this.idProvider;
+ clone.parent = this.parent;
+ clone.keepWorldPosition = this.keepWorldPosition;
+ clone.position = this.position?.clone();
+ clone.rotation = this.rotation?.clone();
+ clone.scale = this.scale?.clone();
+ return clone;
+ }
+
+ /** Copy fields from another object, clone field references */
+ cloneAssign(other: InstantiateOptions | IInstantiateOptions){
+ this.idProvider = other.idProvider;
+ this.parent = other.parent;
+ this.keepWorldPosition = other.keepWorldPosition;
+ this.position = other.position?.clone();
+ this.rotation = other.rotation?.clone();
+ this.scale = other.scale?.clone();
+ }
  }
@@ -245,7 +266,7 @@
  clone: Object3D;
  }

- export function instantiate(instance: GameObject | Object3D | null, opts: InstantiateOptions | null = null): GameObject | null {
+ export function instantiate(instance: GameObject | Object3D | null, opts: IInstantiateOptions | null = null): GameObject | null {
  if (instance === null) return null;

  let options: InstantiateOptions | null = null;
@@ -264,6 +285,7 @@
  // }
  }
  }
+ console.log(options?.position)

  let context = Context.Current;
  if (options?.context) context = options.context;
@@ -319,7 +341,7 @@


  function internalInstantiate(
- context: Context, instance: GameObject | Object3D, opts: InstantiateOptions | null,
+ context: Context, instance: GameObject | Object3D, opts: IInstantiateOptions | InstantiateOptions | null,
  componentsList: Array<Component>,
  newGameObjectsMap: { [key: string]: NewGameObjectReferenceInfo },
  skinnedMeshesMap: { [key: string]: NewGameObjectReferenceInfo }
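
The new `clone()` and `cloneAssign()` helpers copy scalar fields and clone the vector fields, and `instantiate()` now accepts the plain `IInstantiateOptions` interface. A hedged usage sketch (the `@needle-tools/engine` import path and the optionality of the interface fields are assumptions):

```ts
import { Vector3 } from "three";
import { InstantiateOptions } from "@needle-tools/engine"; // assumed export path

const original = new InstantiateOptions();
original.position = new Vector3(1, 2, 3);

// clone() copies scalar fields and clones vector fields
const copy = original.clone();
original.position.set(9, 9, 9);
console.log(copy.position); // still (1, 2, 3): the Vector3 was cloned, not shared

// cloneAssign() copies into an existing instance and also accepts a plain object
const assigned = new InstantiateOptions();
assigned.cloneAssign({ position: new Vector3(4, 5, 6), keepWorldPosition: true });
```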
src/engine/engine_networking_instantiate.ts CHANGED
@@ -11,7 +11,7 @@
  import type { UIDProvider } from "./engine_types.js";
  import type { IModel } from "./engine_networking_types.js";
  import { SendQueue } from "./engine_networking_types.js";
- import { destroy, findByGuid, instantiate } from "./engine_gameobject.js";
+ import { IInstantiateOptions, destroy, findByGuid, instantiate } from "./engine_gameobject.js";
  import { Object3D } from "three";
  import { InstantiateOptions } from "./engine_gameobject.js";
  import { ContextEvent, ContextRegistry } from "../engine/engine_context_registry.js";
@@ -185,7 +185,7 @@
  }
  }

- export function syncInstantiate(object: GameObject | Object3D, opts: InstantiateOptions, hostData?: HostData, save?: boolean): GameObject | null {
+ export function syncInstantiate(object: GameObject | Object3D, opts: IInstantiateOptions, hostData?: HostData, save?: boolean): GameObject | null {

  const obj: GameObject = object as GameObject;
@@ -290,7 +290,7 @@

  }

- function instantiateSeeded(obj: GameObject, opts: InstantiateOptions | null): { instance: GameObject | null, seed: number } {
+ function instantiateSeeded(obj: GameObject, opts: IInstantiateOptions | null): { instance: GameObject | null, seed: number } {
  const seed = generateSeed();
  const options = opts ?? new InstantiateOptions();
  options.idProvider = new InstantiateIdProvider(seed);
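
`syncInstantiate` and `instantiateSeeded` now take `IInstantiateOptions` instead of the concrete class, so networked instantiation can be driven by a plain object literal. A hedged sketch (the import path is an assumption; adjust to your setup):

```ts
import { Object3D, Vector3 } from "three";
import { syncInstantiate } from "@needle-tools/engine"; // assumed export path

const prefab = new Object3D();

// a plain object literal now satisfies the widened options parameter;
// the instantiation is synced to connected clients as before
const instance = syncInstantiate(prefab, {
    position: new Vector3(0, 1, 0),
    keepWorldPosition: true,
});
```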
src/engine-components/ui/EventSystem.ts CHANGED
@@ -488,9 +488,22 @@
  const pressedEvent = this.pressedByID.get(args.pointerId);
  const hoveredObjectChanged = prevHovering !== object;

- const posLastFrame = this.context.input.getPointerPositionLastFrame(args.pointerId!)!;
- const posThisFrame = this.context.input.getPointerPosition(args.pointerId!)!;
- const isMoving = posLastFrame && !Mathf.approximately(posLastFrame, posThisFrame);
+ // TODO: should we not move this check up before we even raycast for "pointerMove" events? We don't need to do any processing if the pointer didn't move
+ let isMoving = true;
+ switch (args.event.pointerType) {
+ case "mouse":
+ case "touch":
+ const posLastFrame = this.context.input.getPointerPositionLastFrame(args.pointerId!)!;
+ const posThisFrame = this.context.input.getPointerPosition(args.pointerId!)!;
+ isMoving = posLastFrame && !Mathf.approximately(posLastFrame, posThisFrame);
+ break;
+ case "controller":
+ case "hand":
+ // for hands and controllers we assume they are never totally still (except for simulated environments)
+ // we might want to add a threshold here (e.g. if a user holds their hand or controller very still)
+ // so maybe check the angle every frame?
+ break;
+ }

  this.propagate(object, args, (behaviour) => {
  const comp = behaviour as any;
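
The movement check now branches on the pointer type: screen-space pointers (mouse, touch) compare positions between frames, while XR controllers and hands are treated as always moving. A simplified standalone sketch of that decision (types and helpers reduced for illustration, not the engine's own):

```ts
// Per-pointer-type movement check, reduced to plain types (illustrative only).
type PointerType = "mouse" | "touch" | "pen" | "controller" | "hand";
type Vec2 = { x: number; y: number };

function approximately(a: number, b: number, epsilon = 1e-6): boolean {
    return Math.abs(a - b) <= epsilon;
}

function pointerIsMoving(type: PointerType, last: Vec2 | undefined, current: Vec2): boolean {
    switch (type) {
        case "mouse":
        case "touch":
            // screen-space pointers only count as moving when their position changed since last frame
            if (!last) return false;
            return !approximately(last.x, current.x) || !approximately(last.y, current.y);
        default:
            // controllers and hands are assumed to never be perfectly still,
            // so they always count as moving (a threshold could be added later)
            return true;
    }
}

console.log(pointerIsMoving("mouse", { x: 10, y: 10 }, { x: 10, y: 10 })); // false
console.log(pointerIsMoving("hand", { x: 10, y: 10 }, { x: 10, y: 10 }));  // true
```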
src/engine-components/timeline/PlayableDirector.ts CHANGED
@@ -164,9 +164,9 @@
  if (!this.isValid()) return;
  const pauseChanged = this._isPaused == true;
  this._isPaused = false;
- if (pauseChanged) this.invokePauseChangedMethodsOnTracks();
  if (this._isPlaying) return;
  this._isPlaying = true;
+ if (pauseChanged) this.invokePauseChangedMethodsOnTracks();
  if (this.waitForAudio) {
  // Make sure audio tracks have loaded at the current time
  const promises: Array<Promise<any>> = [];
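
The only change here is ordering: `_isPlaying` is set to true before the pause-changed callbacks run, so track handlers that read `director.isPlaying` inside `onPauseChanged` (like the audio track handler below) observe the new state. A simplified illustration of why the order matters (not the engine classes):

```ts
// Simplified: notify handlers only after the playing flag has been updated.
interface TrackHandler { onPauseChanged(): void; }

class Director {
    isPlaying = false;
    constructor(private handlers: TrackHandler[]) {}
    play() {
        this.isPlaying = true;                              // update state first...
        for (const h of this.handlers) h.onPauseChanged();  // ...so handlers see isPlaying === true
    }
}

const director = new Director([{
    onPauseChanged() { console.log("resumed while playing:", director.isPlaying); } // logs true
}]);
director.play();
```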
src/engine-components/timeline/TimelineTracks.ts CHANGED
@@ -563,14 +563,16 @@

  const muteAudioTracks = getParam("mutetimeline");

+ declare type AudioClipModel = Models.ClipModel & { _didTriggerPlay: boolean };
+
  export class AudioTrackHandler extends TrackHandler {

- models: Array<Models.ClipModel> = [];
+ models: Array<AudioClipModel> = [];
  listener!: AudioListener;
  audio: Array<Audio> = [];
  audioContextTimeOffset: Array<number> = [];
  lastTime: number = 0;
- audioSource?:AudioSource;
+ audioSource?: AudioSource;

  private _audioLoader: AudioLoader | null = null;
@@ -591,7 +593,9 @@
  addModel(model: Models.ClipModel) {
  const audio = new Audio(this.listener as any);
  this.audio.push(audio);
- this.models.push(model);
+ const audioClipModel = model as AudioClipModel;
+ audioClipModel._didTriggerPlay = false;
+ this.models.push(audioClipModel);
  }

  onDisable() {
@@ -599,6 +603,9 @@
  if (audio.isPlaying)
  audio.stop();
  }
+ for (const model of this.models) {
+ model._didTriggerPlay = false;
+ }
  }

  onDestroy() {
@@ -626,8 +633,23 @@
  if (audio?.isPlaying)
  audio.stop();
  }
+ for (const model of this.models) {
+ model._didTriggerPlay = false;
+ }
  }

+ private _playableDirectorResumed = false;
+ onPauseChanged() {
+ // if the timeline gets paused we stop all audio clips
+ // we don't reset the triggerPlay here (this will automatically reset when the timeline starts evaluating again)
+ for (let i = 0; i < this.audio.length; i++) {
+ const audio = this.audio[i];
+ if (audio?.isPlaying)
+ audio.stop();
+ }
+ this._playableDirectorResumed = this.director.isPlaying;
+ }
+
  evaluate(time: number) {
  if (muteAudioTracks) return;
  if (this.track.muted) return;
@@ -636,6 +658,8 @@
  return;
  }
  const isMuted = this.director.context.application.muted;
+ const resumePlay = this._playableDirectorResumed;
+ this._playableDirectorResumed = false;
  // this is just so that we don't hear the very first beat when the audio starts but is muted
  // if we don't add a delay we hear a little bit of the audio before it shuts down
  // MAYBE instead of doing it like this we should connect a custom audio node (or disconnect the output node?)
@@ -653,15 +677,24 @@
  audio.playbackRate = this.director.context.time.timeScale * this.director.speed;
  audio.loop = asset.loop;
  if (time >= model.start && time <= model.end && time < this.director.duration) {
- if (this.director.isPlaying == false) {
- if (audio.isPlaying)
- audio.stop();
- if (this.lastTime === time) continue;
+ if (!audio.isPlaying || !this.director.isPlaying) {
+ // if the timeline is paused we trigger the audio clip once when the model is entered
+ // we don't play back the audio clip if we scroll back in time
+ // this is to support audio clip playback when using the timeline with manual scrolling (scrollytelling)
+ if (resumePlay || (!model._didTriggerPlay && this.lastTime < time)) {
+ // we don't want to clip in the audio if it's a very short clip
+ const clipDuration = model.duration * model.timeScale;
+ if (clipDuration > .3)
+ audio.offset = model.clipIn + (time - model.start) * model.timeScale;
+ else audio.offset = 0;
+ if (debug) console.log("Timeline Audio (" + this.track.name + ") play with offset " + audio.offset + " - " + model.asset.clip);
+ audio.play(playTimeOffset);
+ model._didTriggerPlay = true;
+ }
+ else {
+ // do nothing...
+ }
  }
- else if (!audio.isPlaying) {
- audio.offset = model.clipIn + (time - model.start) * model.timeScale;
- audio.play(playTimeOffset);
- }
  else {
  const targetOffset = model.clipIn + (time - model.start) * model.timeScale;
  // seems it's non-trivial to get the right time from audio sources;
@@ -677,7 +710,7 @@
  }
  let vol = asset.volume as number;

- if(this.track.volume !== undefined)
+ if (this.track.volume !== undefined)
  vol *= this.track.volume;

  if (isMuted) vol = 0;
@@ -692,8 +725,12 @@
  audio.setVolume(vol * this.director.weight);
  }
  else {
- if (audio.isPlaying)
- audio.stop();
+ model._didTriggerPlay = false;
+ if (this.director.isPlaying) {
+ if (audio.isPlaying) {
+ audio.stop();
+ }
+ }
  }
  }
  this.lastTime = time;
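
When a clip is (re)started mid-timeline, the playback offset inside the audio file is derived from the current timeline time, the clip's start, its `clipIn` trim and its `timeScale`; clips whose scaled duration is 0.3 s or less start from the beginning instead. The arithmetic in isolation (values are illustrative):

```ts
// Offset math used when triggering an audio clip partway through the timeline.
function audioOffset(time: number, start: number, clipIn: number, timeScale: number, duration: number): number {
    const clipDuration = duration * timeScale;
    // very short clips are played from the beginning instead of being clipped in
    if (clipDuration <= 0.3) return 0;
    return clipIn + (time - start) * timeScale;
}

// 2 s into a clip that starts at 10 s on the timeline, trimmed by 0.5 s, at normal speed:
console.log(audioOffset(12, 10, 0.5, 1, 8)); // 2.5
```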