Needle Engine

Changes between versions 3.0.1-alpha.5 and 3.1.0-alpha
Files changed (30)
  1. src/include/three/ARButton.js +2 -2
  2. plugins/vite/config.js +8 -0
  3. plugins/vite/copyfiles.js +12 -3
  4. plugins/vite/drop.js +1 -0
  5. plugins/vite/index.js +2 -0
  6. src/engine/codegen/register_types.js +8 -0
  7. src/engine-components/AnimatorController.ts +18 -3
  8. src/engine/api.ts +1 -1
  9. src/engine-components/AudioSource.ts +2 -2
  10. src/engine-components/Component.ts +1 -1
  11. src/engine-components/codegen/components.ts +4 -0
  12. src/engine-components/postprocessing/Effects/DepthOfField.ts +3 -0
  13. src/engine/engine_addressables.ts +102 -8
  14. src/engine/engine_serialization_builtin_serializer.ts +23 -3
  15. src/engine/engine_serialization_core.ts +27 -20
  16. src/engine/engine_serialization.ts +5 -4
  17. src/engine/engine_utils.ts +35 -14
  18. src/engine/extensions/NEEDLE_animator_controller_model.ts +1 -0
  19. src/engine/extensions/NEEDLE_progressive.ts +2 -2
  20. src/engine-components/ParticleSystem.ts +34 -4
  21. src/engine-components/ParticleSystemModules.ts +1 -1
  22. src/engine-components/Skybox.ts +2 -2
  23. src/engine-components/ui/Text.ts +7 -7
  24. src/engine-components/timeline/TimelineTracks.ts +2 -2
  25. src/engine-components/VideoPlayer.ts +6 -6
  26. src/engine-components/WebXR.ts +11 -2
  27. src/engine-components/WebXRController.ts +2 -2
  28. plugins/vite/transform-codegen.js +45 -0
  29. src/engine-components/postprocessing/Effects/TiltShiftEffect.ts +56 -0
  30. src/engine-components/WebXRImageTracking.ts +192 -0
src/include/three/ARButton.js CHANGED
@@ -1,6 +1,6 @@
  class ARButton {
 
- 	static createButton( renderer, options = {} ) {
+ 	static createButton( renderer, options = {}, beforeRequestSession ) {
 
  		const button = document.createElement( 'button' );
  		let ARButtonControlsDomOverlay = false;
@@ -126,7 +126,7 @@
  		button.onclick = function () {
 
  			if ( currentSession === null ) {
-
+ 				beforeRequestSession?.call(this, options);
  				navigator.xr.requestSession( 'immersive-ar', options ).then( onSessionStarted );
 
  			} else {
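Note: the new optional third argument is invoked with the session options right before `navigator.xr.requestSession` runs, so callers can adjust the init options at the last moment. A minimal caller-side sketch (the renderer variable and feature names are placeholders, not part of this change):

    const button = ARButton.createButton(renderer, { requiredFeatures: ["hit-test"] }, (options) => {
        // mutate the session init options just before the AR session is requested
        options.optionalFeatures = [...(options.optionalFeatures ?? []), "dom-overlay"];
    });
    document.body.appendChild(button);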
plugins/vite/config.js CHANGED
@@ -1,5 +1,6 @@
  import { existsSync, readFileSync } from 'fs';
 
+ /** the codegen meta file */
  export async function loadConfig(path) {
      try {
          // First try to get the path from the config
@@ -28,6 +29,7 @@
      }
  }
 
+ /** get the needle.config.json */
  export function tryLoadProjectConfig() {
      try {
          const root = process.cwd();
@@ -45,4 +47,10 @@
      }
 
      return null;
+ }
+
+
+ /** "assets" -> the directory name inside the output directory to put e.g. glb files into */
+ export function builtAssetsDirectory(){
+     return "assets";
  }
plugins/vite/copyfiles.js CHANGED
@@ -1,6 +1,7 @@
 
  import { resolve, join } from 'path'
  import { existsSync, statSync, mkdirSync, readdirSync, copyFileSync, mkdir } from 'fs';
+ import { builtAssetsDirectory, tryLoadProjectConfig } from './config.js';
 
 
  /** copy files on build from assets to dist */
@@ -19,7 +20,14 @@
      const baseDir = process.cwd();
      const pluginName = "needle-copy-files";
 
-     const outdirName = "dist";
+     let assetsDirName = "assets";
+     let outdirName = "dist";
+
+     const needleConfig = tryLoadProjectConfig();
+     if(needleConfig){
+         assetsDirName = needleConfig.assetsDirectory;
+     }
+
      const outDir = resolve(baseDir, outdirName);
      if (!existsSync(outDir)) {
          mkdirSync(outDir);
@@ -36,9 +44,9 @@
      }
 
      // copy assets dir
-     const assetsDir = resolve(baseDir, 'assets');
+     const assetsDir = resolve(baseDir, assetsDirName);
      if (existsSync(assetsDir)) {
-         console.log(`[${pluginName}] - Copy assets to ${outdirName}/assets`)
+         console.log(`[${pluginName}] - Copy assets to ${outdirName}/${builtAssetsDirectory()}`)
          const targetDir = resolve(outDir, 'assets');
          copyRecursiveSync(assetsDir, targetDir);
      }
@@ -53,6 +61,7 @@
      }
  }
 
+
  function copyRecursiveSync(src, dest) {
      var exists = existsSync(src);
      var stats = exists && statSync(src);
plugins/vite/drop.js CHANGED
@@ -6,6 +6,7 @@
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = path.dirname(__filename);
 
+ /** experimental, allow dropping files from Unity into the running scene */
  export const needleDrop = (command, config, userSettings) => {
      if (command === "build") return;
 
plugins/vite/index.js CHANGED
@@ -6,6 +6,7 @@
  import { editorConnection } from "./editor-connection.js";
  import { needleCopyFiles } from "./copyfiles.js";
  import { needleViteAlias } from "./alias.js";
+ import { needleTransformCodegen } from "./transform-codegen.js";
  import { needleLicense } from "./license.js";
 
  export * from "./gzip.js";
@@ -27,6 +28,7 @@
          needleReload(command, config, userSettings),
          needleBuild(command, config, userSettings),
          needleCopyFiles(command, config, userSettings),
+         needleTransformCodegen(command, config, userSettings),
          needleDrop(command, config, userSettings),
      ];
      array.push(await editorConnection(command, config, userSettings, array));
src/engine/codegen/register_types.js CHANGED
@@ -150,6 +150,7 @@
  import { TestSimulateUserData } from "../../engine-components/TestRunner";
  import { Text } from "../../engine-components/ui/Text";
  import { TextureSheetAnimationModule } from "../../engine-components/ParticleSystemModules";
+ import { TiltShiftEffect } from "../../engine-components/postprocessing/Effects/TiltShiftEffect";
  import { ToneMapping } from "../../engine-components/postprocessing/Effects/Tonemapping";
  import { TrailModule } from "../../engine-components/ParticleSystemModules";
  import { TransformData } from "../../engine-components/export/usdz/extensions/Animation";
@@ -173,7 +174,10 @@
  import { WebXR } from "../../engine-components/WebXR";
  import { WebXRAvatar } from "../../engine-components/WebXRAvatar";
  import { WebXRController } from "../../engine-components/WebXRController";
+ import { WebXRImageTracking } from "../../engine-components/WebXRImageTracking";
+ import { WebXRImageTrackingModel } from "../../engine-components/WebXRImageTracking";
  import { WebXRSync } from "../../engine-components/WebXRSync";
+ import { WebXRTrackedImage } from "../../engine-components/WebXRImageTracking";
  import { XRFlag } from "../../engine-components/XRFlag";
  import { XRGrabModel } from "../../engine-components/WebXRGrabRendering";
  import { XRGrabRendering } from "../../engine-components/WebXRGrabRendering";
@@ -330,6 +334,7 @@
  TypeStore.add("TestSimulateUserData", TestSimulateUserData);
  TypeStore.add("Text", Text);
  TypeStore.add("TextureSheetAnimationModule", TextureSheetAnimationModule);
+ TypeStore.add("TiltShiftEffect", TiltShiftEffect);
  TypeStore.add("ToneMapping", ToneMapping);
  TypeStore.add("TrailModule", TrailModule);
  TypeStore.add("TransformData", TransformData);
@@ -353,7 +358,10 @@
  TypeStore.add("WebXR", WebXR);
  TypeStore.add("WebXRAvatar", WebXRAvatar);
  TypeStore.add("WebXRController", WebXRController);
+ TypeStore.add("WebXRImageTracking", WebXRImageTracking);
+ TypeStore.add("WebXRImageTrackingModel", WebXRImageTrackingModel);
  TypeStore.add("WebXRSync", WebXRSync);
+ TypeStore.add("WebXRTrackedImage", WebXRTrackedImage);
  TypeStore.add("XRFlag", XRFlag);
  TypeStore.add("XRGrabModel", XRGrabModel);
  TypeStore.add("XRGrabRendering", XRGrabRendering);
src/engine-components/AnimatorController.ts CHANGED
@@ -249,9 +249,16 @@
 
          if (action) {
              const dur = state.motion.clip!.duration;
-             const normalizedTime = dur <= 0 ? 1 : action.time / dur;
-             const makeTransition = transition.hasExitTime ? normalizedTime >= transition.exitTime : true;
-             // console.log(state.name, makeTransition, transition.hasExitTime, normalizedTime, transition.exitTime)
+             const normalizedTime = dur <= 0 ? 1 : Math.abs(action.time / dur);
+             let makeTransition = false;
+             if (transition.hasExitTime) {
+                 if (action.timeScale > 0) makeTransition = normalizedTime >= transition.exitTime;
+                 // When the animation is playing backwards we need to check exit time inverted
+                 else if(action.timeScale < 0) makeTransition = 1 - normalizedTime >= transition.exitTime;
+             }
+             else {
+                 makeTransition = true;
+             }
              if (makeTransition) {
                  // if (transition.hasExitTime && transition.exitTime >= .9999)
                  action.clampWhenFinished = true;
@@ -283,6 +290,12 @@
              action.time = 0;
              action.play();
          }
+         else if (action.time <= 0 && action.timeScale < 0) {
+             didTriggerLooping = true;
+             action.reset();
+             action.time = action.getClip().duration;
+             action.play();
+         }
      }
 
      // call update state behaviours:
@@ -367,9 +380,11 @@
      action.stop();
      action.reset();
      action.timeScale = this._speed;
+     if (state.speed !== undefined) action.timeScale *= state.speed;
      action.enabled = true;
      const duration = state.motion.clip!.duration;
      action.time = offsetNormalized * duration;
+     if(action.timeScale < 0) action.time = duration - action.time;
      action.clampWhenFinished = true;
      action.setLoop(LoopOnce, 0);
      if (durationInSec > 0)
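Note: with the new optional `speed` on a state and the inverted exit-time check, a clip whose effective `timeScale` is negative starts at the end of the clip and transitions once playback approaches the clip start. A small illustration of the exit-time math above (values are examples, not part of the diff):

    const dur = 2.0;                                   // clip duration in seconds
    const exitTime = 0.9;                              // normalized exit time on the transition
    const time = 0.1;                                  // action.time while playing backwards
    const normalizedTime = Math.abs(time / dur);       // 0.05
    const makeTransition = 1 - normalizedTime >= exitTime; // true once playback nears the clip start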
src/engine/api.ts CHANGED
@@ -6,7 +6,7 @@
  export * from "./engine_gameobject";
  export * from "./engine_components";
  export * from "./engine_components_internal";
- export { AssetReference } from "./engine_addressables";
+ export { AssetReference, ImageReference } from "./engine_addressables";
  export { Context, FrameEvent } from "./engine_setup";
  export * from "./debug/debug";
  export { validate } from "./engine_util_decorator"
src/engine-components/AudioSource.ts CHANGED
@@ -82,7 +82,7 @@
          document.addEventListener('touchstart', fn);
      }
 
-     @serializable()
+     @serializable(URL)
      clip: string = "";
 
      @serializable()
@@ -202,7 +202,7 @@
                  }
                  break;
              case "visible":
-                 console.log("visible", this.enabled, this.playOnAwake, !this.isPlaying, AudioSource._userInteractionRegistered, this.wasPlaying);
+                 if (debug) console.log("visible", this.enabled, this.playOnAwake, !this.isPlaying, AudioSource._userInteractionRegistered, this.wasPlaying);
                  if (this.enabled && this.playOnAwake && !this.isPlaying && AudioSource._userInteractionRegistered && this.wasPlaying) {
                      this.play();
                  }
src/engine-components/Component.ts CHANGED
@@ -9,6 +9,7 @@
  import { ConstructorConcrete, SourceIdentifier, IComponent, IGameObject, Constructor, GuidsMap, UIDProvider, Collision, ICollider } from "../engine/engine_types";
  import { addNewComponent, destroyComponentInstance, findObjectOfType, findObjectsOfType, getComponent, getComponentInChildren, getComponentInParent, getComponents, getComponentsInChildren, getComponentsInParent, getOrAddComponent, moveComponentInstance, removeComponent } from "../engine/engine_components";
  import { findByGuid, destroy, InstantiateOptions, instantiate, HideFlags, foreachComponent, markAsInstancedRendered, isActiveInHierarchy, isActiveSelf, isUsingInstancing, setActive, isDestroyed } from "../engine/engine_gameobject";
+ import { resolveUrl } from "../engine/engine_utils";
 
 
  // export interface ISerializationCallbackReceiver {
@@ -641,7 +642,6 @@
 
          return false;
      }
-
  }
 
  class Behaviour extends Component {
src/engine-components/codegen/components.ts CHANGED
@@ -145,6 +145,7 @@
  export { TestSimulateUserData } from "../TestRunner";
  export { Text } from "../ui/Text";
  export { TextureSheetAnimationModule } from "../ParticleSystemModules";
+ export { TiltShiftEffect } from "../postprocessing/Effects/TiltShiftEffect";
  export { ToneMapping } from "../postprocessing/Effects/Tonemapping";
  export { TrailModule } from "../ParticleSystemModules";
  export { TransformData } from "../export/usdz/extensions/Animation";
@@ -168,7 +169,10 @@
  export { WebXR } from "../WebXR";
  export { WebXRAvatar } from "../WebXRAvatar";
  export { WebXRController } from "../WebXRController";
+ export { WebXRImageTracking } from "../WebXRImageTracking";
+ export { WebXRImageTrackingModel } from "../WebXRImageTracking";
  export { WebXRSync } from "../WebXRSync";
+ export { WebXRTrackedImage } from "../WebXRImageTracking";
  export { XRFlag } from "../XRFlag";
  export { XRGrabModel } from "../WebXRGrabRendering";
  export { XRGrabRendering } from "../WebXRGrabRendering";
src/engine-components/postprocessing/Effects/DepthOfField.ts CHANGED
@@ -36,6 +36,9 @@
      @serializable(VolumeParameter)
      resolutionScale?: VolumeParameter;
 
+     @serializable(VolumeParameter)
+     bokehScale?: VolumeParameter;
+
      init() {
          this.focalLength.valueProcessor = v => {
              const t = v / 300;
src/engine/engine_addressables.ts CHANGED
@@ -1,8 +1,8 @@
- import { getParam, getPath } from "../engine/engine_utils";
+ import { getParam, resolveUrl } from "../engine/engine_utils";
  // import { loadSync, parseSync } from "./engine_scenetools";
  import { SerializationContext, TypeSerializer } from "./engine_serialization_core";
  import { Context } from "./engine_setup";
- import { Group, Object3D, Scene } from "three";
+ import { Group, Object3D, Scene, Texture } from "three";
  import { processNewScripts } from "./engine_mainloop_utils";
  import { registerPrefabProvider, syncInstantiate } from "./engine_networking_instantiate";
  import { download, hash } from "./engine_web_api";
@@ -50,7 +50,7 @@
  export class AssetReference {
 
      static getOrCreate(sourceId: SourceIdentifier, uri: string, context: Context): AssetReference {
-         const fullPath = getPath(sourceId, uri);
+         const fullPath = resolveUrl(sourceId, uri);
          if (debug) console.log("GetOrCreate Addressable from", sourceId, uri, "FinalPath=", fullPath);
          const addressables = context.addressables;
          const existing = addressables.findAssetReference(fullPath);
@@ -72,15 +72,16 @@
 
      private _loading?: PromiseLike<any>;
 
+     // TODO: rename to url
      get uri(): string {
-         return this._uri;
+         return this._url;
      }
 
      get rawAsset(): any { return this._asset; }
 
      private _asset: any;
      private _glbRoot?: Object3D | null;
-     private _uri: string;
+     private _url: string;
      private _progressListeners: ProgressCallback[] = [];
 
      private _hash?: string;
@@ -90,14 +91,14 @@
      private _rawBinary?: ArrayBuffer | null;
 
      constructor(uri: string, hash?: string) {
-         this._uri = uri;
+         this._url = uri;
          this._hash = hash;
          if (uri.includes("?v="))
              this._hashedUri = uri;
         else
              this._hashedUri = hash ? uri + "?v=" + hash : uri;
 
-         registerPrefabProvider(this._uri, this.onResolvePrefab.bind(this));
+         registerPrefabProvider(this._url, this.onResolvePrefab.bind(this));
      }
 
      private async onResolvePrefab(uri: string): Promise<IGameObject | null> {
@@ -330,4 +331,97 @@
      }
 
  }
- new AddressableSerializer();
+ new AddressableSerializer();
+
+
+
+
+ export class ImageReference {
+
+     private static imageReferences = new Map<string, ImageReference>();
+
+     static getOrCreate(url: string) {
+         let ref = ImageReference.imageReferences.get(url);
+         if (!ref) {
+             ref = new ImageReference(url);
+             ImageReference.imageReferences.set(url, ref);
+         }
+         return ref;
+     }
+
+     constructor(url: string) {
+         this.url = url;
+     }
+
+     readonly url!: string;
+
+     private _bitmap?: Promise<ImageBitmap | null>;
+     private _bitmapObject?: ImageBitmap;
+
+     dispose() {
+         if (this._bitmapObject) {
+             this._bitmapObject.close();
+         }
+         this._bitmap = undefined;
+     }
+
+     createHTMLImage(): HTMLImageElement {
+         const img = new Image();
+         img.src = this.url;
+         return img;
+     }
+
+     createTexture(): Promise<Texture | null> {
+         return this.getBitmap().then((bitmap) => {
+             if (bitmap) {
+                 const texture = new Texture(bitmap);
+                 texture.needsUpdate = true;
+                 return texture;
+             }
+             return null;
+         });
+     }
+
+     /** Loads the bitmap data of the image */
+     getBitmap(): Promise<ImageBitmap | null> {
+         if (this._bitmap) return this._bitmap;
+         this._bitmap = new Promise((res, _) => {
+             const imageElement = document.createElement("img") as HTMLImageElement;
+             imageElement.addEventListener("load", () => {
+                 this._bitmap = createImageBitmap(imageElement).then((bitmap) => {
+                     this._bitmapObject = bitmap;
+                     res(bitmap);
+                     return bitmap;
+                 })
+             });
+             imageElement.addEventListener("error", err => {
+                 console.error("Failed to load image:" + this.url, err);
+                 res(null);
+             });
+             imageElement.src = this.url;
+         });
+         return this._bitmap;
+     }
+ }
+
+
+ export class ImageReferenceSerializer extends TypeSerializer {
+     constructor() {
+         super([ImageReference]);
+     }
+
+     onSerialize(_data: string, _context: SerializationContext) {
+         return null;
+     }
+
+     onDeserialize(data: string, _context: SerializationContext) {
+         if (typeof data === "string") {
+             const url = resolveUrl(_context.gltfId, data)
+             return ImageReference.getOrCreate(url);
+         }
+         return undefined;
+     }
+ }
+ new ImageReferenceSerializer();
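Note: a hedged usage sketch of the new `ImageReference` API exported via api.ts (the `@needle-tools/engine` import path and the material variable are assumptions, not part of this diff):

    import { ImageReference } from "@needle-tools/engine";

    const ref = ImageReference.getOrCreate("https://example.com/marker.png");
    ref.createTexture().then(texture => {
        if (texture) myMaterial.map = texture; // myMaterial is assumed to exist
    });
    // later: ref.dispose() closes the underlying ImageBitmap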
src/engine/engine_serialization_builtin_serializer.ts CHANGED
@@ -7,6 +7,7 @@
  import { Color, Object3D, Texture, WebGLRenderTarget } from "three";
  import { RenderTexture } from "./engine_texture";
  import { isDevEnvironment } from "../engine/debug/debug";
+ import { resolveUrl } from "./engine_utils";
 
  // export class SourcePath {
  //     src?:string
@@ -300,8 +301,8 @@
          return undefined;
      }
 
-     private createEventMethod(target : object, methodName: string, args?: any) : Function | undefined {
-
+     private createEventMethod(target: object, methodName: string, args?: any): Function | undefined {
+
          return (...forwardedArgs) => {
              const method = target[methodName];
              if (typeof method === "function") {
@@ -339,4 +340,23 @@
          return undefined;
      }
  }
- new RenderTextureSerializer();
+ new RenderTextureSerializer();
+
+
+ export class UriSerializer extends TypeSerializer {
+     constructor() {
+         super([URL]);
+     }
+
+     onSerialize(_data: string, _context: SerializationContext) {
+         return null;
+     }
+
+     onDeserialize(data: string, _context: SerializationContext) {
+         if (typeof data === "string") {
+             return resolveUrl(_context.gltfId, data);
+         }
+         return undefined;
+     }
+ }
+ new UriSerializer();
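Note: fields decorated with `@serializable(URL)` (as `AudioSource.clip` and `VideoPlayer.clip` now are) run through this `UriSerializer`, so string urls serialized in a glb are resolved against the glb location via `resolveUrl`. A minimal sketch of a custom component using the same pattern (component name and import path are hypothetical):

    import { Behaviour, serializable } from "@needle-tools/engine";

    export class MyClipInfo extends Behaviour {
        // relative values such as "rel:audio/clip.mp3" are resolved against
        // the url of the glb this component was loaded from
        @serializable(URL)
        clip: string = "";
    }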
src/engine/engine_serialization_core.ts CHANGED
@@ -173,6 +173,7 @@
      root: THREE.Object3D;
 
      gltf?: GLTF;
+     /** the url of the glb that is currently being loaded */
      gltfId?: SourceIdentifier;
      object!: THREE.Object3D;
      target?: object;
@@ -290,16 +291,21 @@
          context.type = undefined;
          context.path = key;
 
+         if (obj.onBeforeDeserializeMember !== undefined) {
+             // callback to the instance, if it returns true assume it's done all the things itself
+             if (obj.onBeforeDeserializeMember(key, data, context) === true) continue;
+         }
+
          if (serializedEntryInfo === null) {
              obj[key] = data;
+             // if(typeof data === "string"){
+             //     const serializer = helper.getSerializerForConstructor(String);
+             //     const res = serializer?.onDeserialize(data, context);
+             //     if(res !== undefined) obj[key] = res;
+             // }
          }
          else {
 
-             if (obj.onBeforeDeserializeMember !== undefined) {
-                 // callback to the instance, if it returns true assume it's done all the things itself
-                 if (obj.onBeforeDeserializeMember(key, data, context) === true) continue;
-             }
-
              if (Array.isArray(serializedEntryInfo)) {
                  for (let i = 0; i < serializedEntryInfo.length; i++) {
                      const typeInfoOrConstructor = serializedEntryInfo[i];
@@ -314,25 +320,26 @@
                  obj[key] = tryResolve(serializedEntryInfo);
              }
 
-             function tryResolve(typeInfoOrConstructor) {
-                 const typeInformationOrConstructor = typeInfoOrConstructor as ITypeInformation;
-                 // if the entry does specify an object of type ITypeInformation and has the type field set
-                 const type = typeInformationOrConstructor.type;
-                 if (type) {
-                     return deserializeObjectWithType(data, type, context, undefined, obj[key]);
-                 }
-                 // it can also just contain a constructor
-                 else {
-                     const constructor = typeInfoOrConstructor as Constructor<any>;
-                     return deserializeObjectWithType(data, constructor, context, undefined, obj[key]);
-                 }
-             }
 
              buffer.length = 0;
+         }
 
-         if (obj.onAfterDeserializeMember !== undefined) {
-             obj.onAfterDeserializeMember(key, data, context);
+         if (obj.onAfterDeserializeMember !== undefined) {
+             obj.onAfterDeserializeMember(key, data, context);
+         }
+
+         function tryResolve(typeInfoOrConstructor) {
+             const typeInformationOrConstructor = typeInfoOrConstructor as ITypeInformation;
+             // if the entry does specify an object of type ITypeInformation and has the type field set
+             const type = typeInformationOrConstructor.type;
+             if (type) {
+                 return deserializeObjectWithType(data, type, context, undefined, obj[key]);
              }
+             // it can also just contain a constructor
+             else {
+                 const constructor = typeInfoOrConstructor as Constructor<any>;
+                 return deserializeObjectWithType(data, constructor, context, undefined, obj[key]);
+             }
          }
      }
  }
@@ -1,6 +1,7 @@
1
1
  import { serializeObject, deserializeObject } from "./engine_serialization_core";
2
- import * as builtin from "./engine_serialization_builtin_serializer";
3
- // export builtin so it will be called and registered
4
- export { serializeObject, deserializeObject, builtin };
5
2
 
6
- export { serializable, serializeable } from "./engine_serialization_decorator"
3
+ export { serializeObject, deserializeObject };
4
+
5
+ export { serializable, serializeable } from "./engine_serialization_decorator"
6
+
7
+ export * from "./engine_serialization_builtin_serializer";
src/engine/engine_utils.ts CHANGED
@@ -203,30 +203,51 @@
      });
  }
 
- // if a timeline is exported via menu item the audio clip path is relative to the glb (same folder)
+ // 1) if a timeline is exported via menu item the audio clip path is relative to the glb (same folder)
  // we need to detect that here and build the new audio source path relative to the new glb location
  // the same is/might be true for any file that is/will be exported via menu item
- const debugGetPath = getParam("debugsourcepath");
- export function getPath(source: SourceIdentifier | undefined, uri: string): string {
-     if (source === undefined) {
-         if (debugGetPath) console.warn("getPath: source is undefined, returning uri", uri);
+ // 2) if the needle.config assetDirectory is modified (from e.g. /assets to /needle/assets) when building a distributable our vite transform and copy plugin will move the files to dist/assets hence we cannot use project-relative paths (because the path changes). What we do instead is make all paths serialized in a glb relative to the glb. The rel: prefix is used to detect urls that need to be resolved.
+ const debugGetPath = getParam("debugresolveurl");
+
+ export const relativePathPrefix = "rel:";
+
+ /** @deprecated use resolveUrl instead */
+ export function getPath(source:SourceIdentifier|undefined, uri:string) : string {
+     return resolveUrl(source, uri);
+ }
+ /**
+  * Use to resolve a url serialized in a glTF file
+  * @param source The uri of the loading file
+  * @param uri The uri of the file to resolve, can be absolute or relative
+  * @returns The resolved uri
+  */
+ export function resolveUrl(source: SourceIdentifier | undefined, uri: string): string {
+     if (uri === undefined) {
+         if (debugGetPath) console.warn("getPath: uri is undefined, returning uri", uri);
          return uri;
      }
+     if(uri.startsWith("./")) {
+         return uri;
+     }
      if (uri.startsWith("http")) {
          if (debugGetPath) console.warn("getPath: uri is absolute, returning uri", uri);
          return uri;
      }
+     if (source === undefined) {
+         if (debugGetPath) console.warn("getPath: source is undefined, returning uri", uri);
+         return uri;
+     }
+     if(uri.startsWith(relativePathPrefix)){
+         uri = uri.substring(4);
+     }
      const pathIndex = source.lastIndexOf("/");
      if (pathIndex >= 0) {
-         let newUri = source.substring(0, pathIndex + 1);
-
-         const uriDirectoryIndex = uri.lastIndexOf("/");
-         if (uriDirectoryIndex >= 0) {
-             newUri += uri.substring(uriDirectoryIndex + 1);
-         } else {
-             newUri += uri;
-         }
-         if (debugGetPath) console.log("getPath:", source, " - changed uri from\n", uri, "\n→ ", newUri);
+         // Take the source uri as the base path
+         const basePath = source.substring(0, pathIndex + 1);
+         // Append the relative uri
+         let newUri = basePath + uri;
+         // newUri = new URL(newUri, globalThis.location.href).href;
+         if (debugGetPath) console.log("source:", source, "- changed uri \nfrom", uri, "\n→ ", newUri, "\n" + basePath);
          return newUri;
      }
      return uri;
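Note: worked examples of the resolution logic above (paths are illustrative):

    resolveUrl("assets/scene.glb", "rel:audio/clip.mp3");            // → "assets/audio/clip.mp3"
    resolveUrl("assets/scene.glb", "https://cdn.example.com/a.mp3"); // → unchanged (absolute url)
    resolveUrl("assets/scene.glb", "./local.mp3");                   // → unchanged ("./" is returned as-is)
    resolveUrl(undefined, "audio/clip.mp3");                         // → unchanged (no source to resolve against)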
src/engine/extensions/NEEDLE_animator_controller_model.ts CHANGED
@@ -32,6 +32,7 @@
  export declare type State = {
      name: string,
      hash: number;
+     speed?: number;
      motion: Motion,
      transitions: Transition[],
      behaviours: StateMachineBehaviourModel[],
src/engine/extensions/NEEDLE_progressive.ts CHANGED
@@ -3,7 +3,7 @@
  import { SourceIdentifier } from "../engine_types";
  import { Context } from "../engine_setup";
  import { addDracoAndKTX2Loaders } from "../engine_loaders";
- import { delay, getParam, getPath } from "../engine_utils";
+ import { delay, getParam, resolveUrl } from "../engine_utils";
 
  export const EXTENSION_NAME = "NEEDLE_progressive";
 
@@ -130,7 +130,7 @@
      if (progressiveInfo) {
          if (debug)
              console.log(key, progressiveInfo.uri, progressiveInfo.guid);
-         const uri = getPath(source, progressiveInfo.uri);
+         const uri = resolveUrl(source, progressiveInfo.uri);
          if (uri.endsWith(".glb") || uri.endsWith(".gltf")) {
              if (!progressiveInfo.guid) {
                  console.warn("missing pointer for glb/gltf texture", progressiveInfo);
src/engine-components/ParticleSystem.ts CHANGED
@@ -193,6 +193,7 @@
  }
 
  genValue(): number {
+     if (!this.system.isPlaying) return 0;
      if (!this.system.emission.enabled) return 0;
      if (this.system.currentParticles >= this.system.maxParticles) return 0;
      // emission over time
@@ -220,6 +221,7 @@
  class ParticleSystemEmissionOverDistance extends BaseValueGenerator {
 
      genValue(): number {
+         if (!this.system.isPlaying) return 0;
          // this seems not be called yet
          return 0;
          // if (this.system.currentParticles >= this.system.maxParticles) return 0;
@@ -547,7 +549,7 @@
      }
      switch (this.system.renderer.renderMode) {
          case ParticleSystemRenderMode.Billboard: return RenderMode.BillBoard;
-         // case ParticleSystemRenderMode.Stretch: return RenderMode.Stretch;
+         case ParticleSystemRenderMode.Stretch: return RenderMode.StretchedBillBoard;
          case ParticleSystemRenderMode.HorizontalBillboard: return RenderMode.LocalSpace;
          case ParticleSystemRenderMode.VerticalBillboard: return RenderMode.LocalSpace;
          case ParticleSystemRenderMode.Mesh: return RenderMode.LocalSpace;
@@ -597,8 +599,8 @@
          }
      }, true)
  }
+
  this._isPlaying = true;
- this._time = 0;
 
  // https://github.com/Alchemist0823/three.quarks/pull/35
  if (this._particleSystem) {
@@ -608,14 +610,42 @@
      this.emission?.reset();
  }
 
- pause() {
+ pause(includeChildren = true) {
+     if (includeChildren) {
+         GameObject.foreachComponent(this.gameObject, comp => {
+             if (comp instanceof ParticleSystem && comp !== this) {
+                 comp.pause(false);
+             }
+         }, true)
+     }
      this._isPlaying = false;
  }
- stop() {
+
+ /** clear=true removes all emitted particles */
+ stop(includeChildren = true, clear: boolean = false) {
+     if (includeChildren) {
+         GameObject.foreachComponent(this.gameObject, comp => {
+             if (comp instanceof ParticleSystem && comp !== this) {
+                 comp.stop(false, clear);
+             }
+         }, true)
+     }
      this._isPlaying = false;
      this._time = 0;
+     if (clear) this.reset();
  }
 
+ /** remove emitted particles and reset time */
+ reset() {
+     this._time = 0;
+     if (this._particleSystem) {
+         this._particleSystem.particleNum = 0;
+         this._particleSystem["emissionState"].time = 0;
+         this._particleSystem["emitEnded"] = false;
+         this.emission?.reset();
+     }
+ }
+
  private _state?: ParticlesEmissionState;
  emit(count: number) {
      if (this._particleSystem) {
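Note: a hedged usage sketch of the extended pause/stop/reset controls (obtaining the component via `getComponent` is an assumption about the calling code, not part of this diff):

    const system = this.gameObject.getComponent(ParticleSystem);
    system?.pause();          // pauses this system and, by default, all child particle systems
    system?.stop(true, true); // stops self and children and clears already emitted particles
    system?.reset();          // removes emitted particles and resets time without stopping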
src/engine-components/ParticleSystemModules.ts CHANGED
@@ -35,7 +35,7 @@
 
  export enum ParticleSystemRenderMode {
      Billboard = 0,
-     // Stretch = 1,
+     Stretch = 1,
      HorizontalBillboard = 2,
      VerticalBillboard = 3,
      Mesh = 4,
src/engine-components/Skybox.ts CHANGED
@@ -5,7 +5,7 @@
  import { EquirectangularRefractionMapping, sRGBEncoding, Texture, TextureLoader } from "three"
  import { syncField } from "../engine/engine_networking_auto";
  import { Camera } from "./Camera";
- import { getParam, getPath } from "../engine/engine_utils";
+ import { getParam, resolveUrl } from "../engine/engine_utils";
 
  const debug = getParam("debugskybox");
 
@@ -56,7 +56,7 @@
      if(debug) console.log("Remote skybox url?: " + url);
 
      if (!url.startsWith("http") && !url.startsWith("www.") && !url.startsWith("data:")) {
-         url = getPath(this.sourceId, url);
+         url = resolveUrl(this.sourceId, url);
          if(debug) console.log("Remote skybox resolved to " + url);
      }
 
src/engine-components/ui/Text.ts CHANGED
@@ -7,7 +7,7 @@
  import { updateRenderSettings } from './Utils';
  import { Canvas } from './Canvas';
  import { serializable } from '../../engine/engine_serialization_decorator';
- import { getParam, getPath } from '../../engine/engine_utils';
+ import { getParam, resolveUrl } from '../../engine/engine_utils';
 
  const debug = getParam("debugtext");
 
@@ -481,24 +481,24 @@
 
      // if a font path has a known suffix we remove it
      if (fontName.endsWith("-regular")) {
-         if (style === FontStyle.Normal) return getPath(this.sourceId, fontName);
+         if (style === FontStyle.Normal) return resolveUrl(this.sourceId, fontName);
          fontName = fontName.substring(0, fontName.length - "-regular".length);
      }
      else if (fontName.endsWith("-bold")) {
-         if (style === FontStyle.Bold)return getPath(this.sourceId, fontName);
+         if (style === FontStyle.Bold)return resolveUrl(this.sourceId, fontName);
          fontName = fontName.substring(0, fontName.length - "-bold".length);
      }
      else if (fontName.endsWith("-italic")) {
-         if (style === FontStyle.Italic)return getPath(this.sourceId, fontName);
+         if (style === FontStyle.Italic)return resolveUrl(this.sourceId, fontName);
          fontName = fontName.substring(0, fontName.length - "-italic".length);
      }
      else if (fontName.endsWith("-bolditalic")) {
-         if (style === FontStyle.BoldAndItalic)return getPath(this.sourceId, fontName);
+         if (style === FontStyle.BoldAndItalic)return resolveUrl(this.sourceId, fontName);
          fontName = fontName.substring(0, fontName.length - "-bolditalic".length);
      }
      else
          // If a font does not have a specific style suffic we dont support getting the correct font style
-         return getPath(this.sourceId, fontName);
+         return resolveUrl(this.sourceId, fontName);
 
      switch (style) {
          case FontStyle.Normal:
@@ -515,7 +515,7 @@
          break;
      }
 
-     return getPath(this.sourceId, fontName);
+     return resolveUrl(this.sourceId, fontName);
  }
  }
 
src/engine-components/timeline/TimelineTracks.ts CHANGED
@@ -5,7 +5,7 @@
  import { Context } from "../../engine/engine_setup";
  import { SignalReceiver } from "./SignalAsset";
  import { AnimationClip, Quaternion, Vector3 } from "three";
- import { getParam, getPath } from "../../engine/engine_utils";
+ import { getParam, resolveUrl } from "../../engine/engine_utils";
  import { AudioSource } from "../AudioSource";
  import { Animator } from "../Animator"
 
@@ -517,7 +517,7 @@
  private getAudioFilePath(path: string) {
      // TODO: this should be the timeline asset location probably which MIGHT be different
      const glbLocation = this.director.sourceId;
-     return getPath(glbLocation, path);
+     return resolveUrl(glbLocation, path);
  }
 
  onAllowAudioChanged(allow: boolean) {
src/engine-components/VideoPlayer.ts CHANGED
@@ -3,7 +3,7 @@
  import { serializable } from "../engine/engine_serialization_decorator";
  import { LinearFilter, Material, Mesh, Object3D, RawShaderMaterial, ShaderMaterial, Texture, TextureLoader, Vector2, Vector4, VideoTexture } from "three";
  import { awaitInput } from "../engine/engine_input_utils";
- import { getParam } from "../engine/engine_utils";
+ import { getParam, resolveUrl } from "../engine/engine_utils";
  import { Renderer } from "./Renderer";
  import { getWorldScale } from "../engine/engine_three_utils";
  import { ObjectUtils, PrimitiveType } from "../engine/engine_create_objects";
@@ -49,11 +49,12 @@
  renderer: THREE.Object3D | null = null;
  @serializable()
  playOnAwake: boolean = true;
- @serializable()
- playOnEnable?: boolean;
 
  @serializable()
  aspectMode: AspectMode = AspectMode.None;
+
+ @serializable(URL)
+ private clip?: string | MediaStream | null = null;
 
  @serializable()
  private renderMode?: VideoRenderMode;
@@ -153,7 +154,6 @@
  private audioOutputMode: VideoAudioOutputMode = VideoAudioOutputMode.AudioSource;
 
  private source!: VideoSource;
- private clip?: string | MediaStream | null = null;
  private url?: string | null = null;
 
  private _videoElement: HTMLVideoElement | null = null;
@@ -209,7 +209,7 @@
  }
 
  onEnable(): void {
-     if (this.playOnEnable === true) {
+     if (this.playOnAwake === true) {
          this.handleBeginPlaying(true);
      }
      if (this.screenspace) {
@@ -422,7 +422,7 @@
  let muted = !this._receivedInput && this.audioOutputMode !== VideoAudioOutputMode.None;
  if(!muted && this._muted) muted = true;
  this._videoElement.muted = muted;
- if (this.playOnAwake || this.playOnEnable)
+ if (this.playOnAwake)
      this._videoElement.autoplay = true;
  }
 
src/engine-components/WebXR.ts CHANGED
@@ -47,6 +47,7 @@
      XRStopped = "xrStopped",
      XRUpdate = "xrUpdate",
      RequestVRSession = "requestVRSession",
+     ModifyAROptions = "modify-ar-options",
  }
 
  export declare type CreateButtonOptions = {
@@ -91,6 +92,9 @@
          this.events.removeEventListener(type, listener);
          return listener;
      }
+     private static dispatchEvent(type: string, event: any): void {
+         this.events.dispatchEvent({ type, detail: event });
+     }
 
      public static createVRButton(webXR: WebXR, opts?: CreateButtonOptions): HTMLButtonElement | HTMLAnchorElement {
          if (!WebXR.XRSupported) {
@@ -122,7 +126,7 @@
              console.warn("No dom overlay root found, HTML overlays on top of screen-based AR will not work.");
          }
 
-         const arButton = ARButton.createButton(webXR.context.renderer, options);
+         const arButton = ARButton.createButton(webXR.context.renderer, options, this.onModifyAROptions.bind(this));
          arButton.classList.add('webxr-ar-button');
          arButton.classList.add('webxr-button');
          WebXR.resetButtonStyles(arButton);
@@ -131,6 +135,10 @@
          return arButton;
      }
 
+     private static onModifyAROptions(options){
+         WebXR.dispatchEvent(WebXREvent.ModifyAROptions, options);
+     }
+
      public static resetButtonStyles(button) {
          if (!button) return;
          button.style.position = "";
@@ -233,7 +241,8 @@
      this.context.domElement.append(buttonsContainer);
 
      // AR support
-     if (this.enableAR && this.createARButton && arSupported) {
+     // if (this.enableAR && this.createARButton && arSupported)
+     {
          arButton = WebXR.createARButton(this);
          this._arButton = arButton;
          buttonsContainer.appendChild(arButton);
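Note: any component can now hook into the AR session options right before the session is requested; this is the mechanism the new WebXRImageTracking component uses. A minimal sketch (the feature name is an example):

    WebXR.addEventListener(WebXREvent.ModifyAROptions, (event: any) => {
        const options = event.detail;
        options.optionalFeatures = [...(options.optionalFeatures ?? []), "hit-test"];
    });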
src/engine-components/WebXRController.ts CHANGED
@@ -8,7 +8,7 @@
  import { Mathf } from "../engine/engine_math";
  import { RaycastOptions } from "../engine/engine_physics";
  import { getWorldPosition, getWorldQuaternion, setWorldPosition, setWorldQuaternion } from "../engine/engine_three_utils";
- import { getParam, getPath } from "../engine/engine_utils";
+ import { getParam, resolveUrl } from "../engine/engine_utils";
  import { addDracoAndKTX2Loaders } from "../engine/engine_loaders";
 
  import { Avatar_POI } from "./avatar/Avatar_Brain_LookAt";
@@ -85,7 +85,7 @@
  const loader = new GLTFLoader();
  addDracoAndKTX2Loaders(loader, context);
  if (ctrl.webXR.handModelPath && ctrl.webXR.handModelPath !== "")
-     loader.setPath(getPath(owner.sourceId, ctrl.webXR.handModelPath));
+     loader.setPath(resolveUrl(owner.sourceId, ctrl.webXR.handModelPath));
  else
      // from XRHandMeshModel.js
      loader.setPath('https://cdn.jsdelivr.net/npm/@webxr-input-profiles/[email protected]/dist/profiles/generic-hand/');
plugins/vite/transform-codegen.js ADDED
@@ -0,0 +1,45 @@
+ import { builtAssetsDirectory, tryLoadProjectConfig } from './config.js';
+
+ /**
+  * modify the glb load path in codegen files
+  * this is necessary if the assets directory is not the default (changed by the user in needle.config.json)
+  */
+ export const needleTransformCodegen = (command, config, userSettings) => {
+
+     if (config?.noCodegenTransform === true || userSettings?.noCodegenTransform === true) {
+         return;
+     }
+
+     let codegenDirectory = "src/generated";
+     const needleConfig = tryLoadProjectConfig();
+     if (needleConfig?.codegenDirectory?.length)
+         codegenDirectory = needleConfig.codegenDirectory;
+
+     let configuredAssetsDirectory = "assets";
+     if (needleConfig?.assetsDirectory?.length)
+         configuredAssetsDirectory = needleConfig.assetsDirectory;
+
+     // https://regex101.com/r/Y05z9P/1
+     // const matchCodegenFilePaths = /\"(.+)\/.+?\.(glb|gltf)/g;
+
+     return [
+         {
+             name: 'needle-transform-files',
+             apply: 'build',
+             transform(src, id) {
+                 if (id.endsWith(codegenDirectory + "/gen.js")) {
+                     const assetsDir = builtAssetsDirectory();
+                     if (assetsDir !== configuredAssetsDirectory) {
+                         console.log(`[needle-transform-files] - Transform codegen paths \"${configuredAssetsDirectory}\" → \"${assetsDir}\"`)
+                         // replace codegen paths
+                         src = src.replaceAll(configuredAssetsDirectory, assetsDir);
+                         return {
+                             code: src,
+                             map: null
+                         }
+                     }
+                 }
+             }
+         }
+     ];
+ }
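Note: the transform only kicks in when the `assetsDirectory` configured in needle.config.json differs from the fixed output folder name. An illustration (paths and values are examples):

    // needle.config.json (example): { "assetsDirectory": "needle/assets", "codegenDirectory": "src/generated" }
    // src/generated/gen.js before the build:  "needle/assets/myScene.glb"
    // gen.js in the bundle after this plugin: "assets/myScene.glb"   (builtAssetsDirectory() is fixed to "assets")
    // opt out with `noCodegenTransform: true` in the plugin config or userSettings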
src/engine-components/postprocessing/Effects/TiltShiftEffect.ts ADDED
@@ -0,0 +1,56 @@
+ import { registerCustomEffectType } from "../VolumeProfile";
+ import { EffectProviderResult, PostProcessingEffect } from "../PostProcessingEffect";
+ import { KernelSize, TiltShiftEffect as TiltShift } from "postprocessing";
+ import { VolumeParameter } from "../VolumeParameter";
+ import { serializable } from "../../../engine/engine_serialization";
+
+
+ export class TiltShiftEffect extends PostProcessingEffect {
+     get typeName(): string {
+         return "TiltShiftEffect";
+     }
+
+     @serializable(VolumeParameter)
+     offset!: VolumeParameter;
+     @serializable(VolumeParameter)
+     rotation!: VolumeParameter;
+     @serializable(VolumeParameter)
+     focusArea!: VolumeParameter;
+     @serializable(VolumeParameter)
+     feather!: VolumeParameter
+     @serializable(VolumeParameter)
+     kernelSize!: VolumeParameter;
+     @serializable(VolumeParameter)
+     resolutionScale!: VolumeParameter;
+
+     init(): void {
+         this.offset.defaultValue = 0;
+         this.rotation.defaultValue = 0;
+         this.focusArea.defaultValue = 0.4;
+         this.feather.defaultValue = 0.3;
+         this.kernelSize.defaultValue = KernelSize.MEDIUM;
+         this.resolutionScale.defaultValue = 1 / window.devicePixelRatio;
+     }
+
+
+     onCreateEffect(): EffectProviderResult | undefined {
+
+         console.log(this);
+
+         const effect = new TiltShift({
+             kernelSize: KernelSize.VERY_LARGE,
+         });
+
+         this.offset.onValueChanged = v => effect.offset = v;
+         this.rotation.onValueChanged = v => effect.rotation = v;
+         this.focusArea.onValueChanged = v => effect.focusArea = v;
+         this.feather.onValueChanged = v => effect.feather = v;
+         this.kernelSize.onValueChanged = v => effect.blurPass.kernelSize = v;
+         this.resolutionScale.onValueChanged = v => effect.resolution.scale = v / window.devicePixelRatio;
+
+
+         return effect;
+     }
+
+ }
+ registerCustomEffectType("TiltShiftEffect", TiltShiftEffect);
src/engine-components/WebXRImageTracking.ts ADDED
@@ -0,0 +1,192 @@
+ import { WebXR } from "./WebXR";
+ import { serializable } from "../engine/engine_serialization";
+ import { Behaviour } from "./Component";
+ import { Matrix4, Object3D, Quaternion, Vector, Vector3 } from "three";
+ import { CircularBuffer, getParam } from "../engine/engine_utils";
+
+ // https://github.com/immersive-web/marker-tracking/blob/main/explainer.md
+
+ const debug = getParam("debugimagetracking");
+
+ const _scaleTemp = new Vector3();
+
+ export class WebXRTrackedImage {
+
+
+     get url(): string { return this._trackedImage.image ?? ""; }
+     get widthInMeters() { return this._trackedImage.widthInMeters ?? undefined; }
+     get bitmap(): ImageBitmap { return this._bitmap; }
+     readonly measuredSize: number;
+     readonly state: "tracked" | "emulated";
+
+     // private _matrix: Matrix4 | null = null;
+     // private get matrix(): Matrix4 {
+     //     if (!this._matrix) {
+     //         // this._matrix = WebXRTrackedImage._matrixBuffer.get();
+     //         // const matrix = this._pose.transform.matrix;
+     //         // this._matrix.fromArray(matrix);
+     //     }
+     //     return this._matrix!;
+     // }
+
+     /** Copy the image position to a vector */
+     getPosition(vec: Vector3) {
+         this.ensureTransformData();
+         vec.copy(this._position);
+         return vec;
+     }
+
+     /** Copy the image rotation to a quaternion */
+     getQuaternion(quat: Quaternion) {
+         this.ensureTransformData();
+         quat.copy(this._rotation);
+         return quat;
+     }
+
+     applyToObject(object: Object3D) {
+         this.ensureTransformData();
+         object.position.copy(this._position);
+         object.quaternion.copy(this._rotation);
+     }
+
+     // private static _matrixBuffer: CircularBuffer<Matrix4> = new CircularBuffer(() => new Matrix4(), 20);
+     private static _positionBuffer: CircularBuffer<Vector3> = new CircularBuffer(() => new Vector3(), 20);
+     private static _rotationBuffer: CircularBuffer<Quaternion> = new CircularBuffer(() => new Quaternion(), 20);
+     private _position!: Vector3;
+     private _rotation!: Quaternion;
+     private ensureTransformData() {
+         if (!this._position) {
+             this._position = WebXRTrackedImage._positionBuffer.get();
+             this._rotation = WebXRTrackedImage._rotationBuffer.get();
+             const t = this._pose.transform;
+             this._position.set(-t.position.x, t.position.y, -t.position.z);
+             this._rotation.set(-t.orientation.x, t.orientation.y, -t.orientation.z, t.orientation.w);
+         }
+     }
+
+     private readonly _trackingComponent: WebXRImageTracking;;
+     private readonly _trackedImage: WebXRImageTrackingModel;
+     private readonly _bitmap: ImageBitmap;
+     private readonly _pose: any;
+
+     constructor(context: WebXRImageTracking, trackedImage: WebXRImageTrackingModel, bitmap: ImageBitmap, measuredSize: number, state: "tracked" | "emulated", pose: any) {
+         this._trackingComponent = context;;
+         this._trackedImage = trackedImage;
+         this._bitmap = bitmap;
+         this.measuredSize = measuredSize;
+         this.state = state;
+         this._pose = pose;
+     }
+
+ }
+
+ declare type WebXRImageTrackingEvent = (images: WebXRImageTrackingEvent[]) => void;
+
+ export class WebXRImageTrackingModel {
+
+     @serializable()
+     image?: string;
+
+     @serializable()
+     widthInMeters!: number;
+
+ }
+
+ export class WebXRImageTracking extends Behaviour {
+
+     @serializable(WebXRImageTrackingModel)
+     trackedImages!: WebXRImageTrackingModel[];
+
+
+
+     private readonly trackedImageIndexMap: Map<number, WebXRImageTrackingModel> = new Map();
+
+     private static _imageElements: Map<string, ImageBitmap | null> = new Map();
+
+     awake(): void {
+         for (const trackedImage of this.trackedImages) {
+             if (trackedImage.image) {
+                 if (WebXRImageTracking._imageElements.has(trackedImage.image)) {
+                 }
+                 else {
+                     const url = trackedImage.image;
+                     WebXRImageTracking._imageElements.set(url, null);
+                     const imageElement = document.createElement("img") as HTMLImageElement;
+                     imageElement.src = url;
+                     imageElement.addEventListener("load", async () => {
+                         const img = await createImageBitmap(imageElement);
+                         WebXRImageTracking._imageElements.set(url, img);
+                     });
+                 }
+             }
+         }
+     }
+
+     onEnable(): void {
+         WebXR.addEventListener("modify-ar-options", this.onModifyAROptions);
+     }
+
+     onDisable(): void {
+         WebXR.removeEventListener("modify-ar-options", this.onModifyAROptions);
+     }
+
+
+     private onModifyAROptions = (event: any) => {
+         const options = event.detail;
+         const features = options.optionalFeatures || [];
+         features.push("image-tracking");
+         options.optionalFeatures = features;
+
+         options.trackedImages = [];
+         for (const trackedImage of this.trackedImages) {
+             if (trackedImage.image?.length && trackedImage.widthInMeters > 0) {
+                 const bitmap = WebXRImageTracking._imageElements.get(trackedImage.image);
+                 if (bitmap) {
+                     this.trackedImageIndexMap.set(options.trackedImages.length, trackedImage);
+                     options.trackedImages.push({
+                         image: bitmap,
+                         widthInMeters: trackedImage.widthInMeters
+                     });
+                 }
+             }
+         }
+     }
+
+     onBeforeRender(frame: XRFrame | null): void {
+         //@ts-ignore
+         if (frame?.session && typeof frame.getImageTrackingResults === "function") {
+             //@ts-ignore
+             const results = frame.getImageTrackingResults();
+             if (results.length) {
+                 const space = this.context.renderer.xr.getReferenceSpace();
+                 if (space) {
+                     const images: WebXRTrackedImage[] = [];
+                     for (const result of results) {
+                         const imageIndex = result.index;
+                         const trackedImage = this.trackedImageIndexMap.get(imageIndex);
+                         if (trackedImage) {
+                             const pose = frame.getPose(result.imageSpace, space);
+                             const state = result.trackingState;
+                             const imageData = new WebXRTrackedImage(this, trackedImage, result.image, result.measuredSize, state, pose);
+                             images.push(imageData);
+                         }
+                         else {
+                             if (debug) {
+                                 console.warn("No tracked image for index", imageIndex);
+                             }
+                         }
+                     }
+                     if (images.length > 0) {
+                         try {
+                             this.dispatchEvent(new CustomEvent("image-tracking", { detail: images }));
+                         }
+                         catch (e) {
+                             console.error(e);
+                         }
+                     }
+                 }
+             }
+         }
+     }
+ }
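Note: a hedged usage sketch for consuming the tracking results. It assumes the component exposes `addEventListener` as the counterpart to the `dispatchEvent` call above, and the target object is a placeholder:

    const tracking = this.gameObject.getComponent(WebXRImageTracking);
    tracking?.addEventListener("image-tracking", (evt: any) => {
        const images: WebXRTrackedImage[] = evt.detail;
        for (const img of images) {
            img.applyToObject(trackedObject); // copies the detected position and rotation
        }
    });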