@@ -136,6 +136,15 @@
     }
     private _backgroundIntensity?: number;

+    @serializable()
+    public set environmentIntensity(val: number | undefined) {
+        this._environmentIntensity = val;
+    }
+    public get environmentIntensity(): number | undefined {
+        return this._environmentIntensity;
+    }
+    private _environmentIntensity?: number;
+
     @serializable(RGBAColor)
     public get backgroundColor(): RGBAColor | null {
         return this._backgroundColor ?? null;
@@ -27,7 +27,7 @@
     camInstance.fieldOfView = 35;
     // TODO: can we store the backgroundBlurriness in the gltf file somewhere except inside the camera?
    // e.g. when we export a scene from blender without a camera in the scene
-    camInstance.backgroundBlurriness = .5
+    camInstance.backgroundBlurriness = .125; // same as in blender 0.5
    const cam = addNewComponent(cameraObject, camInstance, true) as ICamera;

    cameraObject.position.x = 0;
@@ -205,6 +205,7 @@
     farClipPlane: number;
     backgroundColor: RGBAColor | null;
     backgroundBlurriness: number | undefined;
+    environmentIntensity: number | undefined;
     clearFlags: number;
     cullingMask: number;
     aspect: number;
@@ -622,7 +622,8 @@

         if (material.envMapIntensity !== undefined) {
             const factor = this.hasLightmap ? Math.PI : 1;
-            material.envMapIntensity = Math.max(0, this.context.sceneLighting.environmentIntensity / factor);
+            const environmentIntensity = this.context.mainCameraComponent?.environmentIntensity ?? 1;
+            material.envMapIntensity = Math.max(0, environmentIntensity * this.context.sceneLighting.environmentIntensity / factor);
         }
         // if (this._reflectionProbe?.texture) {
         // material.envMap = this._reflectionProbe.texture;