@@ -4,7 +4,7 @@
  * @param {string} code
  * @param {string | null | undefined} licenseType
  */
-module.exports.replaceLicense = function (code, licenseType) {
+module.exports.replaceLicense = async function (code, licenseType) {
 
     if (!licenseType) {
         const meta = getMeta();
@@ -13,6 +13,9 @@
         }
     }
 
+    if (typeof licenseType === "object") {
+        licenseType = await this.resolveLicense(licenseType);
+    }
 
     if (!licenseType) {
         return code;
@@ -28,4 +31,59 @@
         return code;
     }
 }
+}
+
+
+
+
+
+const LICENSE_ENDPOINT = `https://urls.needle.tools/license-endpoint`
+
+/**
+ * Resolve the license for a given license key (e.g. invoice id) and id (e.g. email that was used for the purchase)
+ * @param {import('../types/license.js').License} license
+ * @returns {Promise<string | null>}
+ */
+module.exports.resolveLicense = async function (license) {
+    console.log("\n");
+
+    if (typeof license !== "object") {
+        return license;
+    }
+
+    if (!license.key) {
+        console.warn("WARN: License key is missing.");
+        return null;
+    }
+    else if (!license.id) {
+        console.warn("WARN: License id is missing.");
+        return null;
+    }
+
+    console.log("Resolve license for " + license.id + "::" + license.key);
+    const url = await fetch(LICENSE_ENDPOINT, { method: "GET" });
+    if (!url.ok) {
+        console.warn("WARN: Failed to fetch license URL from endpoint. " + url.statusText);
+        return null;
+    }
+    const str = await url.text();
+    if (!str) {
+        console.warn("WARN: Failed to fetch license URL from endpoint. " + url.statusText);
+        return null;
+    }
+    const licenseRequestUrl = `${str}?email=${license.id}&key=${license.key}&version=2`;
+    const req = await fetch(licenseRequestUrl, { method: "GET" });
+    if (!req.ok) {
+        console.warn("WARN: Failed to fetch license: " + req.statusText);
+        return null;
+    }
+    /** @type {{license:string}} */
+    const licenseResponse = await req.json();
+    console.log("\n");
+    if (licenseResponse.license) {
+        console.log(`INFO: Successfully received \"${licenseResponse.license?.toUpperCase()}\" license`)
+        return licenseResponse.license;
+    }
+    console.warn("WARN: Received invalid license.");
+    return null;
 }
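
Review note: a minimal usage sketch for the new async `replaceLicense`/`resolveLicense` pair above. The module path, the caller, and the license values are assumptions for illustration; note that `replaceLicense` calls `this.resolveLicense`, so the module object is kept as the receiver.

```ts
// Sketch only (assumed path and values); relies on Node 18+ for the global fetch used by resolveLicense.
const license = require("../common/license.cjs");

async function transformChunk(code: string): Promise<string> {
    // A plain string license is applied directly; a { id, key } object is first
    // resolved against the license endpoint via resolveLicense().
    return await license.replaceLicense(code, { id: "you@example.com", key: "INV-1234" });
}
```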
@@ -1,9 +1,13 @@
+import { resolveLicense } from '../common/license.cjs';
 import { loadConfig } from './config.js';
 
-
-
+/**
+ * This plugin is used to apply the license to the needle engine.
+ * @param {string} command - The command that is being run
+ * @param {object} config - The config object
+ * @param {import('../types/userconfig.js').userSettings}
+ */
 export const needleLicense = (command, config, userSettings) => {
-
     return {
         name: "needle-license",
         enforce: 'pre',
@@ -12,31 +16,48 @@
             // sometimes the actual license parameter is in a unnamed chunk file
             const isViteChunkFile = id.includes("chunk") && id.includes(".vite");
             if (isNeedleEngineFile || isViteChunkFile) {
-
-                if (
-
-
-
-                    console.log("Applying license: " + needleConfig.license);
-                }
-                const index = src.indexOf("NEEDLE_ENGINE_LICENSE_TYPE");
-                if (index >= 0) {
-                    const end = src.indexOf(";", index);
-                    if (end >= 0) {
-                        const line = src.substring(index, end);
-                        const replaced = "NEEDLE_ENGINE_LICENSE_TYPE = \"" + needleConfig.license + "\"";
-                        src = src.replace(line, replaced);
-                        return { code: src, map: null }
-                    }
-                }
-            }
+
+                if (userSettings.license) {
+                    // we only accept a license object here
+                    if (typeof userSettings.license === "object")
+                        await applyLicense(userSettings.license);
                 }
                 else {
-
-
-
+                    const needleConfig = await loadConfig();
+                    if (needleConfig) {
+                        await applyLicense(needleConfig.license);
                     }
                 }
+
+                /**
+                 * @param {import('../types/license.js').License | string} license
+                 */
+                async function applyLicense(license) {
+
+                    if (!license) {
+                        return;
+                    }
+
+                    // TODO: remove allowing to apply a string license here
+
+                    if (typeof license !== "string") {
+                        license = await resolveLicense(license).catch(err => {
+                            console.error("Error resolving license", err.message);
+                            return null;
+                        });
+                    }
+
+                    const index = src.indexOf("NEEDLE_ENGINE_LICENSE_TYPE");
+                    if (index >= 0) {
+                        const end = src.indexOf(";", index);
+                        if (end >= 0) {
+                            const line = src.substring(index, end);
+                            const replaced = "NEEDLE_ENGINE_LICENSE_TYPE = \"" + license + "\"";
+                            src = src.replace(line, replaced);
+                            return { code: src, map: null }
+                        }
+                    }
+                }
             }
         }
     }
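
For context, a standalone sketch of the rewrite that the new `applyLicense` helper performs on the bundled source; the sample input line is an assumption, only the search-and-replace logic mirrors the diff.

```ts
// Illustrative chunk content; in the plugin `src` is the code of the transformed file.
let src = `const NEEDLE_ENGINE_LICENSE_TYPE = "basic";`;
const resolvedLicense = "pro"; // e.g. the value returned by resolveLicense()

const index = src.indexOf("NEEDLE_ENGINE_LICENSE_TYPE");
if (index >= 0) {
    const end = src.indexOf(";", index);
    if (end >= 0) {
        const line = src.substring(index, end);
        src = src.replace(line, `NEEDLE_ENGINE_LICENSE_TYPE = "${resolvedLicense}"`);
    }
}
console.log(src); // -> const NEEDLE_ENGINE_LICENSE_TYPE = "pro";
```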
@@ -4,7 +4,7 @@
 import { Animator } from "../../../Animator.js";
 import { GameObject } from "../../../Component.js";
 import type { IUSDExporterExtension } from "../Extension.js";
-import { buildMatrix, findStructuralNodesInBoneHierarchy, getPathToSkeleton,usdNumberFormatting as fn, USDObject } from "../ThreeUSDZExporter.js";
+import { buildMatrix, findStructuralNodesInBoneHierarchy, getPathToSkeleton,usdNumberFormatting as fn, USDObject, USDWriter, USDZExporterContext } from "../ThreeUSDZExporter.js";
 
 const debug = getParam("debugusdzanimation");
 const debugSerialization = getParam("debugusdzanimationserialization");
@@ -491,7 +491,7 @@
         this.model.addEventListener("serialize", this.callback);
     }
 
-    skinnedMeshExport(writer, _context) {
+    skinnedMeshExport(writer: USDWriter, _context: USDZExporterContext) {
         const model = this.model;
         const dict = this.dict;
         if (!model) return;
@@ -536,7 +536,7 @@
         if (!assumedRoot) console.error("No bone parent found for skinned mesh during USDZ export", model.skinnedMesh);
         boneAndInverse.sort((a, b) => getPathToSkeleton(a.bone, assumedRoot) > getPathToSkeleton(b.bone, assumedRoot) ? 1 : -1);
 
-        function createVector3TimeSampleLines_( values ) {
+        function createVector3TimeSampleLines_( values: Map<number, Vector3[]> ) {
 
            const lines:string[] = []
            for (const [frame, frameValues] of values) {
@@ -554,7 +554,7 @@
 
        }
 
-        function createVector4TimeSampleLines_( rotations ) {
+        function createVector4TimeSampleLines_( rotations: Map<number, Quaternion[]> ) {
 
            const lines:string[] = []
 
@@ -652,7 +652,7 @@
            return times;
        }
 
-        function createTimeSamplesObject_( data: TransformDataByObject, sortedComponentFrameNumbers: AnimationClipFrameTimes[], bones ) {
+        function createTimeSamplesObject_( data: TransformDataByObject, sortedComponentFrameNumbers: AnimationClipFrameTimes[], bones: Array<Object3D> ) {
            const positionTimeSamples = new Map<number, Array<Vector3>>();
            const quaternionTimeSamples = new Map<number, Array<Quaternion>>();
            const scaleTimeSamples = new Map<number, Array<Vector3>>();
@@ -723,7 +723,7 @@
            return lines.join( ', ' );
        }
 
-        function getPerBoneTransformData( bones ): TransformDataByObject {
+        function getPerBoneTransformData( bones: Array<Object3D> ): TransformDataByObject {
 
            const boneToTransformData = new Map<Object3D, TransformData[]>();
            if (debug) {
@@ -749,17 +749,27 @@
            return createTimeSamplesObject_( perBoneTransformData, sortedFrameNumbers, bones );
        }
 
+        const sanitizeRestPose = _context.quickLookCompatible;
+
        const rest: Array<Matrix4> = [];
        const translations: Array<Vector3> = [];
        const rotations: Array<Quaternion> = [];
        const scales: Array<Vector3> = [];
        for ( const { bone } of boneAndInverse ){
 
-
+            // Workaround for FB13808839: Rest poses must be decomposable in QuickLook
+            if (sanitizeRestPose) {
+                const scale = bone.scale;
+                if (scale.x == 0) scale.x = 0.00001;
+                if (scale.y == 0) scale.y = 0.00001;
+                if (scale.z == 0) scale.z = 0.00001;
+                rest.push( new Matrix4().compose( bone.position, bone.quaternion, bone.scale )) ;
+            } else {
+                rest.push( bone.matrix.clone() );
+            }
            translations.push( bone.position ) ;
            rotations.push( bone.quaternion );
            scales.push( bone.scale );
-
        }
 
        const bonesArray = boneAndInverse.map( x => "\"" + getPathToSkeleton(x.bone, assumedRoot) + "\"" ).join( ', ' );
@@ -844,7 +854,7 @@
 
 
 
-    onSerialize(writer, _context) {
+    onSerialize(writer: USDWriter, _context: USDZExporterContext) {
        if (!this.model) return;
 
        // Workaround: Sanitize TransformData for this object.
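
A short sketch of why the QuickLook workaround above clamps zero scale components before composing the rest matrix: a rest pose built from a zero scale cannot be cleanly decomposed back into translation, rotation and scale, which is what FB13808839 trips over. Plain three.js; the bone values are illustrative.

```ts
import { Matrix4, Quaternion, Vector3 } from "three";

// A bone whose scale collapsed to zero on one axis (e.g. an intentionally hidden bone).
const position = new Vector3(0, 1, 0);
const rotation = new Quaternion().setFromAxisAngle(new Vector3(0, 1, 0), Math.PI / 2);
const scale = new Vector3(1, 0, 1);

// Same clamping as the exporter: keep the scale strictly non-zero.
if (scale.x === 0) scale.x = 0.00001;
if (scale.y === 0) scale.y = 0.00001;
if (scale.z === 0) scale.z = 0.00001;

const rest = new Matrix4().compose(position, rotation, scale);

// The clamped matrix stays decomposable, so a consumer can recover the TRS components again.
const p = new Vector3(), q = new Quaternion(), s = new Vector3();
rest.decompose(p, q, s);
console.log(p.toArray(), q.toArray(), s.toArray());
```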
@@ -108,15 +108,21 @@
             leftObj.quaternion.multiply(flipForwardQuaternion);
             leftObj.visible = leftCtrl.isTracking;
         }
+        else if (leftObj && leftObj.visible) {
+            leftObj.visible = false;
+        }
 
         const right = args.xr.rightController;
-
-
+        const rightObj = this.rightHand?.asset as Object3D;
+        if (right && rightObj) {
             rightObj.position.copy(right.gripPosition);
             rightObj.quaternion.copy(right.gripQuaternion);
             rightObj.quaternion.multiply(flipForwardQuaternion);
             rightObj.visible = right.isTracking;
         }
+        else if (rightObj && rightObj.visible) {
+            rightObj.visible = false;
+        }
     }
 
     onBeforeRender(): void {
@@ -1070,6 +1070,12 @@
             const buffers = world.debugRender();
             this.lines.geometry.setAttribute('position', new BufferAttribute(buffers.vertices, 3));
             this.lines.geometry.setAttribute('color', new BufferAttribute(buffers.colors, 4));
+            // If a scene has no colliders at all at the start of the scene
+            // the bounding sphere radius will be 0 and the lines will not be rendered
+            // so we need to update the bounding sphere (perhaps it's enough to do this once...)
+            if (this.context!.time.frame % 30 === 0 || this.lines.geometry.boundingSphere?.radius === 0) {
+                this.lines.geometry.computeBoundingSphere();
+            }
         }
         else {
             if (this.lines) {
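
A brief sketch of the reasoning behind the added bounding-sphere update: three.js frustum-culls the debug `LineSegments` against its geometry's bounding sphere, so a stale radius of 0 (no colliders when the scene started) keeps the lines invisible even after the buffers are filled. Plain three.js, illustrative data.

```ts
import { BufferAttribute, BufferGeometry, LineBasicMaterial, LineSegments } from "three";

const geometry = new BufferGeometry();
geometry.setAttribute("position", new BufferAttribute(new Float32Array(0), 3));
geometry.computeBoundingSphere(); // no vertices yet -> radius 0, the object gets culled

const lines = new LineSegments(geometry, new LineBasicMaterial());

// Later the physics debug buffers contain data and the attribute is replaced...
const vertices = new Float32Array([0, 0, 0, 1, 1, 1]);
geometry.setAttribute("position", new BufferAttribute(vertices, 3));

// ...so the bounding sphere must be refreshed, otherwise the stale radius-0 sphere
// keeps the lines outside every camera frustum.
if (lines.geometry.boundingSphere?.radius === 0) {
    lines.geometry.computeBoundingSphere();
}
```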
@@ -465,9 +465,10 @@
                 const indexString = val.substring("/textures/".length);
                 const texIndex = Number.parseInt(indexString);
                 if (texIndex >= 0) {
-
+                    const tex = await this.parser.getDependency("texture", texIndex);
                     if (tex instanceof Texture) {
-
+                        // TODO: if we clone the texture here then progressive textures won't find it (and at this point there's no LOD userdata assigned yet) so the texture will not be loaded.
+                        // tex = tex.clone();
                         tex.colorSpace = LinearSRGBColorSpace;
                         tex.needsUpdate = true;
                     }
@@ -1,6 +1,6 @@
 import { isDevEnvironment } from "../../debug/index.js";
 import type { Context } from "../../engine_context.js";
-import { hasProLicense, onLicenseCheckResultChanged } from "../../engine_license.js";
+import { hasCommercialLicense, hasProLicense, onLicenseCheckResultChanged } from "../../engine_license.js";
 import { isLocalNetwork } from "../../engine_networking_utils.js";
 import { getParam } from "../../engine_utils.js";
 import { onXRSessionStart, XRSessionEventArgs } from "../../xr/events.js";
@@ -454,7 +454,7 @@
 
         // if the user has a license then we CAN hide the needle logo
         onLicenseCheckResultChanged(res => {
-            if (res == true &&
+            if (res == true && hasCommercialLicense() && !debugNonCommercial) {
                 this.logoContainer.style.display = this._userRequestedLogoVisible ? "" : "none";
             }
         });
@@ -487,7 +487,7 @@
             // ensure the menu is not hidden or removed
             const requiredParent = this?.parentNode;
             if (this.style.display != "flex" || this.style.visibility != "visible" || this.style.opacity != "1" || requiredParent != this._domElement?.shadowRoot) {
-                if (!
+                if (!hasCommercialLicense()) {
                     const change = changeEventCounter++;
                     // if a user doesn't have a local pro license *but* for development the menu is hidden then we show a warning
                     if (isLocalNetwork() && this._userRequestedMenuVisible === false) {
@@ -539,7 +539,7 @@
     showNeedleLogo(visible: boolean) {
         this._userRequestedLogoVisible = visible;
         if (!visible) {
-            if (!
+            if (!hasCommercialLicense() || debugNonCommercial) {
                 console.warn("Needle Menu: You need a PRO license to hide the Needle Engine logo.");
                 const localNetwork = isLocalNetwork()
                 if (!localNetwork) return;
@@ -1,3 +1,5 @@
+import { License } from "./license";
+
 export type needleMeta = {
     needleEditor: string | null,
     meta: string | null;
@@ -7,7 +9,7 @@
     generator: string | null,
     gzip: boolean,
     allowHotReload: boolean,
-    license: string | null,
+    license: License | string | null | undefined,
     useRapier: boolean,
     developmentBuild: boolean,
 }
@@ -696,49 +696,12 @@
     }
 
     private onBeforeRenderThree = (_renderer, _scene, _camera, _geometry, material, _group) => {
-
-
         if (material.envMapIntensity !== undefined) {
             const factor = this.hasLightmap ? Math.PI : 1;
             const environmentIntensity = this.context.mainCameraComponent?.environmentIntensity ?? 1;
             material.envMapIntensity = Math.max(0, environmentIntensity * this.context.sceneLighting.environmentIntensity / factor);
         }
 
-        // if (this._reflectionProbe?.texture) {
-        //     material.envMap = this._reflectionProbe.texture;
-        //     // this.context.renderer.prop
-        //     // console.log(material.name);
-        //     // this.context.renderer.properties.get(material);
-        //     // this.context.renderer.properties.update(material, "environment", this._reflectionProbe.texture);
-        // }
-
-        // _scene.environment = null;
-        // else _scene.environment = Renderer.envmap;
-        // if (!material.envmap)
-        //     material.envMap = Renderer.envmap;
-        // material.needsUpdate = true;
-
-        // if (!camera) {
-        //     let isXRCamera = false;
-        //     if (this.context.isInXR) {
-        //         // @ts-ignore
-        //         const arr = this.context.renderer.xr.getCamera() as ArrayCamera;
-        //         if (arr.cameras?.length > 0) {
-        //             camera = arr;
-        //             isXRCamera = true;
-        //         }
-        //     }
-        // }
-
-        // if (this.customShaderHandler) {
-        //     this.customShaderHandler.onBeforeRender(renderer, scene, camera, geometry, material, group);
-        // }
-        // else if (this.rawShaderHandler) {
-        //     for (const h of this.rawShaderHandler) {
-        //         h.onBeforeRender(this.gameObject, camera);
-        //     }
-        // }
-
         if (this._lightmaps) {
             for (const lm of this._lightmaps) {
                 lm.updateLightmapUniforms(material);
@@ -1,4 +1,5 @@
-import {
+import { NEEDLE_progressive } from "@needle-tools/gltf-progressive";
+import { Material, Mesh, MeshPhysicalMaterial, ShaderMaterial, Texture, Vector4, WebGLProgramParametersWithUniforms } from "three";
 
 import type { Context } from "../engine/engine_setup.js";
 import { getParam } from "../engine/engine_utils.js";
@@ -42,6 +43,9 @@
         if (this.lightmapIndex < 0) return;
         this.lightmapScaleOffset = lightmapScaleOffset;
         this.lightmapTexture = lightmapTexture;
+        NEEDLE_progressive.assignTextureLOD(this.lightmapTexture, 0).then(res => {
+            if (res instanceof Texture) this.lightmapTexture = res;
+        })
         if (debug == "show") {
             console.log("Lightmap:", this.gameObject.name, lightmapIndex, "\nScaleOffset:", lightmapScaleOffset, "\nTexture:", lightmapTexture)
             this.setLightmapDebugMaterial();
@@ -107,6 +111,9 @@
 
     private ensureLightmapMaterial(material: Material) {
         if (!material.userData) material.userData = {};
+        // if (material instanceof MeshPhysicalMaterial) {
+        //     return material;
+        // }
         // check if the material version has changed and only then clone the material
         if (material["NEEDLE:lightmap-material-version"] != material.version) {
             if (material["NEEDLE:lightmap-material-version"] == undefined) {
@@ -124,11 +131,20 @@
 
     private assignLightmapTexture(material: MaterialWithLightmap) {
         if (!material) return;
+        if (material instanceof MeshPhysicalMaterial && material.transmission > 0) {
+            return;
+        }
+        const hasChanged = material.lightMap !== this.lightmapTexture || material["NEEDLE:lightmap-material-version"] !== material.version;
+        if (!hasChanged) {
+            return;
+        }
+
+        if (debug) console.log("Assigning lightmap", material.name, material.version);
+
         // assign the lightmap
         material.lightMap = this.lightmapTexture;
         // store the version of the material
         material["NEEDLE:lightmap-material-version"] = material.version;
-        // mat.needsUpdate = true;
     }
 
     private onBeforeCompile = (shader: WebGLProgramParametersWithUniforms, _) => {
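
A condensed sketch of the change-detection pattern introduced in `assignLightmapTexture`: the material is tagged with the `version` at which the lightmap was last applied, so repeated calls become no-ops until the texture or the material actually changes. The standalone function is illustrative; the key name and checks mirror the diff.

```ts
import { MeshStandardMaterial, Texture } from "three";

const VERSION_KEY = "NEEDLE:lightmap-material-version";

function assignLightmap(material: MeshStandardMaterial, lightmap: Texture) {
    const taggedVersion = (material as any)[VERSION_KEY];
    const hasChanged = material.lightMap !== lightmap || taggedVersion !== material.version;
    if (!hasChanged) return; // nothing to do: avoids touching the material on every call

    material.lightMap = lightmap;
    // remember at which material version the lightmap was applied
    (material as any)[VERSION_KEY] = material.version;
}
```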
@@ -10,24 +10,50 @@
 
 const debug = getParam("debugscreensharing");
 
+/**
+ * ScreenCapture component allows you to share your screen, camera or microphone with other users in the networked room.
+ */
 export enum ScreenCaptureDevice {
+    /**
+     * Capture the screen of the user.
+     */
     Screen = 0,
+    /**
+     * Capture the camera of the user.
+     */
     Camera = 1,
     /** Please note that canvas streaming might not work reliably on chrome: https://bugs.chromium.org/p/chromium/issues/detail?id=1156408 */
     Canvas = 2,
-    /** When using Microphone only the voice will be
+    /** When using Microphone only the voice will be send */
     Microphone = 3
 }
 
+/**
+ * {@link ScreenCapture} allows you to share your screen, camera or microphone with other users in the networked room.
+ */
+declare type ScreenCaptureDeviceTypes = keyof typeof ScreenCaptureDevice;
+
+/**
+ * The current mode of the {@link ScreenCapture} component.
+ */
 export enum ScreenCaptureMode {
     Idle = 0,
     Sending = 1,
     Receiving = 2
 }
 
-
-
-
+/**
+ * Options for the {@link ScreenCapture} component when starting to share a stream by calling the {@link ScreenCapture.share}.
+ */
+export declare type ScreenCaptureOptions = {
+    /**
+     * You can specify the device type to capture (e.g. Screen, Camera, Microphone)
+     */
+    device?: ScreenCaptureDeviceTypes,
+    /**
+     * Constraints for the media stream like resolution, frame rate, etc.
+     * @see https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamConstraints
+     */
     constraints?: MediaTrackConstraints,
     /** Filter video device by id. Alternatively pass in a deviceFilter callback to manually filter available devices */
     deviceId?: string,
@@ -35,12 +61,23 @@
     deviceFilter?: (device: MediaDeviceInfo) => boolean,
 }
 
-/**
+/**
+ * The ScreenCapture component allows you to share your screen, camera or microphone with other users in the networked room.
+ * When the stream is active the video will be displayed on the VideoPlayer component attached to the same GameObject.
+ *
+ * Note: For debugging append `?debugscreensharing` to the URL to see more information in the console.
+ *
+ * By default the component will start sharing the screen when the user clicks on the object this component is attached to. You can set {@link device} This behaviour can be disabled by setting `allowStartOnClick` to false.
+ * It is also possible to start the stream manually from your code by calling the {@link share} method.
+ *
+ */
 export class ScreenCapture extends Behaviour implements IPointerClickHandler {
 
     /**
      * When enabled the stream will start when the user clicks on the object this component is attached to
-     * It is also possible to start the stream manually from your code by calling the share
+     * It is also possible to start the stream manually from your code by calling the {@link share} method
+     * To modify what type of device is shared you can set the {@link device} property.
+     * @default true
      */
     @serializable()
     allowStartOnClick: boolean = true;
@@ -81,7 +118,9 @@
     @serializable()
     autoConnect: boolean = false;
 
-
+    /**
+     * If a VideoPlayer component is assigned to this property the video will be displayed on the VideoPlayer component.
+     */
     @serializable(VideoPlayer)
     set videoPlayer(val: VideoPlayer | undefined) {
         if (this._videoPlayer && (this.isSending || this.isReceiving)) {
@@ -96,14 +135,37 @@
     private _videoPlayer?: VideoPlayer;
     private _audioSource?: AudioSource;
 
+    /**
+     * When enabled the video will be displayed in the screenspace of the VideoPlayer component.
+     */
     get screenspace() { return this.videoPlayer?.screenspace ?? false; }
     set screenspace(v: boolean) { if (this.videoPlayer) this.videoPlayer.screenspace = v; }
 
-
-
+    /**
+     * Which streaming device type should be used when starting to share (if {@link share} is called without a device option). Options are Screen, Camera, Microphone.
+     * This is e.g. used if `allowStartOnClick` is enabled and the user clicks on the object.
+     * @default Screen
+     */
     @serializable()
-    device:
+    device: ScreenCaptureDeviceTypes = "Screen";
 
+    /**
+     * If assigned the device the device will be selected by this id or label when starting to share.
+     * Note: This is only supported for `Camera` devices
+     */
+    @serializable()
+    deviceName?: string;
+
+    /**
+     * Filter which device should be chosen for sharing by id or label.
+     * Assign a method to this property to manually filter the available devices.
+     */
+    deviceFilter?: (device: MediaDeviceInfo) => boolean;
+
+    /**
+     * the current stream that is being shared or received
+     * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaStream
+     */
     get currentScream(): MediaStream | null {
         return this._currentStream;
     }
@@ -111,9 +173,15 @@
         return this._currentMode;
     }
 
+    /**
+     * @returns true if the component is currently sending a stream
+     */
     get isSending() {
         return this._currentStream?.active && this._currentMode === ScreenCaptureMode.Sending;
     }
+    /**
+     * @returns true if the component is currently receiving a stream
+     */
     get isReceiving() {
         if (this._currentMode === ScreenCaptureMode.Receiving) {
             if (!this._currentStream || this._currentStream.active === false) return false;
@@ -126,8 +194,9 @@
         return false;
     }
 
-    private get requiresVideoPlayer() {
-
+    private get requiresVideoPlayer() {
+        return this.device !== "Microphone";
+    }
     private _net?: NetworkedStreams;
     private _requestOpen: boolean = false;
     private _currentStream: MediaStream | null = null;
@@ -135,6 +204,11 @@
 
     /** @internal */
     awake() {
+        // Resolve the device type if it is a number
+        if (typeof this.device === "number") {
+            this.device = ScreenCaptureDevice[this.device] as ScreenCaptureDeviceTypes;
+        }
+
         if (debug)
             console.log("Screensharing", this.name, this);
         AudioSource.registerWaitForAllowAudio(() => {
@@ -251,12 +325,12 @@
 
             switch (this.device) {
                 // Capture a connected camera
-                case
+                case "Camera":
                     this.tryShareUserCamera(displayMediaOptions, opts);
                     break;
 
                 // capture any screen, will show a popup
-                case
+                case "Screen":
                     {
                         if (!navigator.mediaDevices.getDisplayMedia) {
                             console.error("No getDisplayMedia support");
@@ -271,7 +345,7 @@
                     break;
 
                 // capture the canvas meaning the threejs view
-                case
+                case "Canvas":
                     // looks like this doesnt work reliably on chrome https://stackoverflow.com/a/66848674
                     // firefox updates fine
                     // https://bugs.chromium.org/p/chromium/issues/detail?id=1156408
@@ -280,7 +354,7 @@
                     this.setStream(stream, ScreenCaptureMode.Sending);
                     break;
 
-                case
+                case "Microphone":
                     {
                         if (!navigator.mediaDevices.getUserMedia) {
                             console.error("No getDisplayMedia support");
@@ -295,6 +369,9 @@
                     }
                     break
 
+                default:
+                    console.error("Can not start screen sharing: Unknown device type", this.device);
+
             }
         } catch (err: any) {
             if (err.name === "NotAllowedError") {
@@ -330,7 +407,7 @@
         this._requestOpen = true;
         this._currentMode = mode;
 
-        const isVideoStream = this.device !==
+        const isVideoStream = this.device !== "Microphone";
         const isSending = mode === ScreenCaptureMode.Sending;
 
         if (isVideoStream) {
@@ -348,7 +425,7 @@
                 this.gameObject.addComponent(this._audioSource);
             }
             if (!isSending) {
-                if(debug) console.log("PLAY", stream.getAudioTracks())
+                if (debug) console.log("PLAY", stream.getAudioTracks())
                 this._audioSource.volume = 1;
                 this._audioSource?.play(stream);
             }
@@ -394,7 +471,7 @@
 
 
 
-    private async tryShareUserCamera(
+    private async tryShareUserCamera(constraints: MediaStreamConstraints, options?: ScreenCaptureOptions) {
 
         // let newWindow = open('', 'example', 'width=300,height=300');
         // if (window) {
@@ -404,32 +481,88 @@
         // TODO: allow user to select device
         const devices = (await navigator.mediaDevices.enumerateDevices()).filter(d => d.kind === "videoinput");
         if (debug)
-            console.log("Request camera", devices);
+            console.log("Request camera. These are your kind:videoinput devices:\n", devices);
+
+        let foundDevice = false;
+
         for (const dev of devices) {
             try {
-                if (!this._requestOpen)
-
+                if (!this._requestOpen) {
+                    if (debug) console.log("Camera selection cancelled");
+                    break;
+                }
+
+                if (dev.kind !== "videoinput") {
+                    if (debug) console.log("Skipping non-video device", dev);
+                    continue;
+                }
+
                 const id = dev.deviceId;
-
-
+
+                // If the share method is called with filter options then those should be used
+                const hasOptionsFilter = options?.deviceId != undefined || options?.deviceFilter != undefined;
+                if (hasOptionsFilter) {
+                    if (options?.deviceId !== undefined) {
+                        if (id !== options.deviceId) {
+                            if (debug) console.log("Skipping device due to options.deviceId: " + dev.label + "; " + dev.deviceId);
+                            continue;
+                        }
+                    }
+                    if (options?.deviceFilter) {
+                        const useDevice = options.deviceFilter(dev);
+                        if (useDevice === false) {
+                            if (debug) console.log("Skipping device due to options.deviceFilter: " + dev.label + "; " + dev.deviceId);
+                            continue;
+                        }
+                    }
+                }
+                // If the share method was called without filter options then the component filter should be used
+                else if (this.deviceFilter) {
+                    const useDevice = this.deviceFilter(dev);
+                    if (useDevice === false) {
+                        if (debug) console.log("Skipping device due to ScreenShare.deviceFilter: " + dev.label + "; " + dev.deviceId);
                        continue;
+                    }
+                    else if(debug)
+                        console.log("Selected device by filter", dev);
                }
-
-
+                else if (this.deviceName) {
+                    const lowercaseLabel = dev.label.toLowerCase();
+                    const lowercaseName = this.deviceName.toLowerCase();
+                    const labelMatches = lowercaseLabel.includes(lowercaseName);
+                    const idMatches = dev.deviceId === this.deviceName;
+                    if (!labelMatches && !idMatches) {
+                        if (debug) console.log("Skipping device due to ScreenShare.deviceName: " + dev.label + "; " + dev.deviceId);
+                        continue;
+                    }
+                    else if(debug) console.log("Selected device by name", dev);
+                }
 
-                if (
-                if (typeof
-
+                if (constraints.video !== false) {
+                    if (typeof constraints.video === "undefined" || typeof constraints.video === "boolean") {
+                        constraints.video = {};
                    }
-
+                    constraints.video.deviceId = id;
                }
-
-
+
+                foundDevice = true;
+                const userMedia = await navigator.mediaDevices.getUserMedia(constraints).catch(err => {
+                    console.error("Failed to get user media", err);
+                    return null;
+                })
+                if (userMedia === null) {
+                    continue;
+                }
+                else if (this._requestOpen) {
                    this.setStream(userMedia, ScreenCaptureMode.Sending);
+                    if (debug)
+                        console.log("Selected camera", dev);
                }
-                else
-
-
+                else {
+                    disposeStream(userMedia);
+                    if (debug)
+                        console.log("Camera selection cancelled");
+                }
                break;
            }
            catch (err: any) {
@@ -444,6 +577,11 @@
                }
            }
        }
+
+        if(!foundDevice && isDevEnvironment()){
+            showBalloonWarning("No camera found for sharing. Please connect a camera (see console for more information)");
+            console.warn("No camera found for sharing. Please connect a camera", devices, this.deviceName, "Using deviceFilter? " + this.deviceFilter != undefined, "Using options? " + options != undefined, "Using deviceName? " + this.deviceName != undefined, "Using options.deviceId? " + options?.deviceId != undefined, "Using options.deviceFilter? " + options?.deviceFilter != undefined);
+        }
     }
     // private _cameraSelectionWindow : Window | null = null;
     // private openWindowToSelectCamera(){
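
Review note: a hedged usage sketch for the new sharing options. It assumes `share()` accepts a `ScreenCaptureOptions` object (as the added doc comments suggest) and that `ScreenCapture` and `GameObject` are exported from `@needle-tools/engine`; the component lookup and device label are placeholders.

```ts
import { GameObject, ScreenCapture } from "@needle-tools/engine";

const capture = GameObject.findObjectOfType(ScreenCapture);
if (capture) {
    // Configure the component so a click on the object starts a camera stream...
    capture.device = "Camera";
    capture.deviceName = "Logitech"; // matched against the device label or exact deviceId

    // ...or start sharing from code and filter the device explicitly.
    capture.share({
        device: "Camera",
        deviceFilter: dev => dev.label.toLowerCase().includes("logitech"),
    });
}
```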
@@ -1467,9 +1467,6 @@
             elementSize = 4
             interpolation = "vertex"
         )` : '' }
-        ${hasBones ?
-        //`uniform token[] skel:blendShapes
-        `uniform token[] skel:joints = [${bonesArray}]` : '' }
         uniform token subdivisionScheme = "none"
     }
 }
@@ -1,3 +1,4 @@
+import { License } from "./license";
 import { NeedlePWAOptions } from "./webmanifest.d.ts";
 
 export type needleModules = {
@@ -26,7 +27,7 @@
     noReload: boolean;
     /** Set to false to disable hot reload for the needle plugin. */
     allowHotReload: boolean;
-
+
     noCodegenTransform: boolean;
     noFacebookInstantGames: boolean;
     /** Set to true to create an imports.log file that shows all module imports. The file is generated when stopping the server. */
@@ -55,5 +56,10 @@
     pwa?: undefined;
 
     /** used by nextjs config to forward the webpack module */
-    modules: needleModules
+    modules: needleModules;
+
+    /**
+     * Use to activate a needle engine license
+     */
+    license?: License;
 }
@@ -0,0 +1,11 @@
+
+
+/**
+ * Use to activate a needle engine license
+ */
+export type License = {
+    /** this is the email you bought the license with */
+    id: string,
+    /** this is one of the invoice IDs for an active subscription */
+    key: string,
+}
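
A hedged sketch of the `License` shape the new type declares and how it is meant to reach the `needleLicense` plugin via `userSettings.license`; the import path and the surrounding settings object are assumptions, only the `{ id, key }` fields come from this diff.

```ts
import type { License } from "./types/license"; // path is illustrative

const license: License = {
    id: "you@example.com", // the email the license was bought with
    key: "INV-1234",       // one of the invoice ids of an active subscription
};

// The plugin reads this from userSettings.license and resolves it before injecting
// the resulting license type into the engine bundle.
const userSettings = { license };
```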