Reworked motion blur post-process to include screen based mode.

julien-moreau 4 years ago
parent
commit
2d801f51a0

+ 1 - 0
dist/preview release/what's new.md

@@ -293,6 +293,7 @@
 
 - SSAO 2, motion blur and screen space reflections are now using the brand new `PrePassRenderer` to avoid rendering the scene twice ([CraigFeldpsar](https://github.com/craigfeldspar))
 - Added Screen Space Curvature post process: [Doc](https://doc.babylonjs.com/how_to/how_to_use_postprocesses#screen-space-curvature) ([Popov72](https://github.com/Popov72) and [Sebavan](https://github.com/sebavan/))
+- Added support for screen based motion blur in `MotionBlurPostProcess` (default mode is object based), to be used as a fallback to save performance on large scenes ([julien-moreau](https://github.com/julien-moreau))
 
 ## Bugs
 

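For readers of the changelog entry above, here is a minimal usage sketch of the new screen based mode. The canvas id, camera placement and the UMD `babylonjs` import are illustrative assumptions, not part of this commit; only the `MotionBlurPostProcess` calls come from the API shown in the diff below.

```ts
import { Engine, Scene, FreeCamera, Vector3, MotionBlurPostProcess } from "babylonjs";

// Illustrative setup: any existing engine/scene/camera works just as well.
const canvas = document.getElementById("renderCanvas") as HTMLCanvasElement;
const engine = new Engine(canvas, true);
const scene = new Scene(engine);
const camera = new FreeCamera("camera", new Vector3(0, 1, -5), scene);

// Constructor signature as shown in the diff: (name, scene, options, camera, ...).
const motionBlur = new MotionBlurPostProcess("motionBlur", scene, 1.0, camera);

// Default mode is object based; switch to the new screen based mode as a
// cheaper fallback on large scenes.
motionBlur.isObjectBased = false;

// Existing quality/strength knobs still apply in both modes.
motionBlur.motionBlurSamples = 16; // default is 32
motionBlur.motionStrength = 0.5;

engine.runRenderLoop(() => scene.render());
```
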
+ 129 - 31
src/PostProcesses/motionBlurPostProcess.ts

@@ -1,6 +1,6 @@
 import { Nullable } from "../types";
 import { Logger } from "../Misc/logger";
-import { Vector2 } from "../Maths/math.vector";
+import { Matrix, Vector2 } from "../Maths/math.vector";
 import { Camera } from "../Cameras/camera";
 import { Effect } from "../Materials/effect";
 import { PostProcess, PostProcessOptions } from "./postProcess";
@@ -41,6 +41,7 @@ export class MotionBlurPostProcess extends PostProcess {
     /**
     * Gets the number of iterations used for motion blur quality. Default value is equal to 32
      */
+    @serialize()
     public get motionBlurSamples(): number {
         return this._motionBlurSamples;
     }
@@ -50,18 +51,39 @@ export class MotionBlurPostProcess extends PostProcess {
      */
     public set motionBlurSamples(samples: number) {
         this._motionBlurSamples = samples;
+        this._updateEffect();
+    }
 
-        if (this._geometryBufferRenderer) {
-            this.updateEffect("#define GEOMETRY_SUPPORTED\n#define SAMPLES " + samples.toFixed(1));
+    private _motionBlurSamples: number = 32;
+
+    /**
+     * Gets whether or not the motion blur post-process is in object based mode.
+     */
+    @serialize()
+    public get isObjectBased(): boolean {
+        return this._isObjectBased;
+    }
+
+    /**
+     * Sets whether or not the motion blur post-process is in object based mode.
+     */
+    public set isObjectBased(value: boolean) {
+        if (this._isObjectBased === value) {
+            return;
         }
+
+        this._isObjectBased = value;
+        this._applyMode();
     }
 
-    @serialize("motionBlurSamples")
-    private _motionBlurSamples: number = 32;
+    private _isObjectBased: boolean = true;
 
     private _forceGeometryBuffer: boolean = false;
-    private _geometryBufferRenderer: Nullable<GeometryBufferRenderer>;
-    private _prePassRenderer: PrePassRenderer;
+    private _geometryBufferRenderer: Nullable<GeometryBufferRenderer> = null;
+    private _prePassRenderer: Nullable<PrePassRenderer> = null;
+
+    private _invViewProjection: Nullable<Matrix> = null;
+    private _previousViewProjection: Nullable<Matrix> = null;
 
     /**
      * Gets a string identifying the name of the class
@@ -85,7 +107,7 @@ export class MotionBlurPostProcess extends PostProcess {
      * @param forceGeometryBuffer If this post process should use geometry buffer instead of prepass (default: false)
      */
     constructor(name: string, scene: Scene, options: number | PostProcessOptions, camera: Nullable<Camera>, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Constants.TEXTURETYPE_UNSIGNED_INT, blockCompilation = false, forceGeometryBuffer = true) {
-        super(name, "motionBlur", ["motionStrength", "motionScale", "screenSize"], ["velocitySampler"], options, camera, samplingMode, engine, reusable, "#define GEOMETRY_SUPPORTED\n#define SAMPLES 64.0", textureType, undefined, null, blockCompilation);
+        super(name, "motionBlur", ["motionStrength", "motionScale", "screenSize", "inverseViewProjection", "prevViewProjection"], ["velocitySampler"], options, camera, samplingMode, engine, reusable, "#define GEOMETRY_SUPPORTED\n#define SAMPLES 64.0\n#define OBJECT_BASED", textureType, undefined, null, blockCompilation);
 
         this._forceGeometryBuffer = forceGeometryBuffer;
 
@@ -97,31 +119,15 @@ export class MotionBlurPostProcess extends PostProcess {
                 this._geometryBufferRenderer.enableVelocity = true;
             }
         } else {
-            this._prePassRenderer = <PrePassRenderer>scene.enablePrePassRenderer();
-            this._prePassRenderer.markAsDirty();
-            this._prePassEffectConfiguration = new MotionBlurConfiguration();
-        }
+            this._prePassRenderer = scene.enablePrePassRenderer();
 
-        if (!this._geometryBufferRenderer && !this._prePassRenderer) {
-            // We can't get a velocity texture. So, work as a passthrough.
-            Logger.Warn("Multiple Render Target support needed to compute object based motion blur");
-            this.updateEffect();
-        } else {
-            this.onApply = (effect: Effect) => {
-                effect.setVector2("screenSize", new Vector2(this.width, this.height));
-
-                effect.setFloat("motionScale", scene.getAnimationRatio());
-                effect.setFloat("motionStrength", this.motionStrength);
-
-                if (this._geometryBufferRenderer) {
-                    const velocityIndex = this._geometryBufferRenderer.getTextureIndex(GeometryBufferRenderer.VELOCITY_TEXTURE_TYPE);
-                    effect.setTexture("velocitySampler", this._geometryBufferRenderer.getGBuffer().textures[velocityIndex]);
-                } else {
-                    const velocityIndex = this._prePassRenderer.getIndex(Constants.PREPASS_VELOCITY_TEXTURE_TYPE);
-                    effect.setTexture("velocitySampler", this._prePassRenderer.prePassRT.textures[velocityIndex]);
-                }
-            };
+            if (this._prePassRenderer) {
+                this._prePassRenderer.markAsDirty();
+                this._prePassEffectConfiguration = new MotionBlurConfiguration();
+            }
         }
+
+        this._applyMode();
     }
 
     /**
@@ -181,6 +187,98 @@ export class MotionBlurPostProcess extends PostProcess {
         super.dispose(camera);
     }
 
+    /**
+     * Called when the mode is changed (object based or screen based).
+     */
+    private _applyMode(): void {
+        if (!this._geometryBufferRenderer && !this._prePassRenderer) {
+            // We can't get a velocity or depth texture. So, work as a passthrough.
+            Logger.Warn("Multiple Render Target support needed to compute object based motion blur");
+            return this.updateEffect();
+        }
+
+        this._updateEffect();
+
+        this._invViewProjection = null;
+        this._previousViewProjection = null;
+
+        if (this.isObjectBased) {
+            if (this._prePassRenderer && this._prePassEffectConfiguration) {
+                this._prePassEffectConfiguration.texturesRequired[0] = Constants.PREPASS_VELOCITY_TEXTURE_TYPE;
+            }
+
+            this.onApply = (effect: Effect) => this._onApplyObjectBased(effect);
+        } else {
+            this._invViewProjection = Matrix.Identity();
+            this._previousViewProjection = Matrix.Identity();
+
+            if (this._prePassRenderer && this._prePassEffectConfiguration) {
+                this._prePassEffectConfiguration.texturesRequired[0] = Constants.PREPASS_DEPTHNORMAL_TEXTURE_TYPE;
+            }
+
+            this.onApply = (effect: Effect) => this._onApplyScreenBased(effect);
+        }
+    }
+
+    /**
+     * Called when the effect is applied and the motion blur post-process is in object based mode.
+     */
+    private _onApplyObjectBased(effect: Effect): void {
+        effect.setVector2("screenSize", new Vector2(this.width, this.height));
+
+        effect.setFloat("motionScale", this._scene.getAnimationRatio());
+        effect.setFloat("motionStrength", this.motionStrength);
+
+        if (this._geometryBufferRenderer) {
+            const velocityIndex = this._geometryBufferRenderer.getTextureIndex(GeometryBufferRenderer.VELOCITY_TEXTURE_TYPE);
+            effect.setTexture("velocitySampler", this._geometryBufferRenderer.getGBuffer().textures[velocityIndex]);
+        } else if (this._prePassRenderer) {
+            const velocityIndex = this._prePassRenderer.getIndex(Constants.PREPASS_VELOCITY_TEXTURE_TYPE);
+            effect.setTexture("velocitySampler", this._prePassRenderer.prePassRT.textures[velocityIndex]);
+        }
+    }
+
+    /**
+     * Called when the effect is applied and the motion blur post-process is in screen based mode.
+     */
+    private _onApplyScreenBased(effect: Effect): void {
+        const viewProjection = this._scene.getProjectionMatrix().multiply(this._scene.getViewMatrix());
+
+        viewProjection.invertToRef(this._invViewProjection!);
+        effect.setMatrix("inverseViewProjection", this._invViewProjection!);
+
+        effect.setMatrix("prevViewProjection", this._previousViewProjection!);
+        this._previousViewProjection = viewProjection;
+
+        effect.setVector2("screenSize", new Vector2(this.width, this.height));
+
+        effect.setFloat("motionScale", this._scene.getAnimationRatio());
+        effect.setFloat("motionStrength", this.motionStrength);
+
+        if (this._geometryBufferRenderer) {
+            const depthIndex = this._geometryBufferRenderer.getTextureIndex(GeometryBufferRenderer.DEPTHNORMAL_TEXTURE_TYPE);
+            effect.setTexture("depthSampler", this._geometryBufferRenderer.getGBuffer().textures[depthIndex]);
+        } else if (this._prePassRenderer) {
+            const depthIndex = this._prePassRenderer.getIndex(Constants.PREPASS_DEPTHNORMAL_TEXTURE_TYPE);
+            effect.setTexture("depthSampler", this._prePassRenderer.prePassRT.textures[depthIndex]);
+        }
+    }
+
+    /**
+     * Called when the effect must be updated (mode change, samples count change, etc.).
+     */
+    private _updateEffect(): void {
+        if (this._geometryBufferRenderer || this._prePassRenderer) {
+            const defines: string[] = [
+                "#define GEOMETRY_SUPPORTED",
+                "#define SAMPLES " + this._motionBlurSamples.toFixed(1),
+                this._isObjectBased ? "#define OBJECT_BASED" : "#define SCREEN_BASED"
+            ];
+
+            this.updateEffect(defines.join("\n"));
+        }
+    }
+
     /** @hidden */
     public static _Parse(parsedPostProcess: any, targetCamera: Camera, scene: Scene, rootUrl: string): Nullable<MotionBlurPostProcess> {
         return SerializationHelper.Parse(() => {

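The screen based path added above boils down to some per-frame matrix bookkeeping in `_onApplyScreenBased`: invert the current view-projection for the shader and keep the previous frame's view-projection around. A standalone sketch of that idea follows; the `ViewProjectionHistory` class name is illustrative and not part of the commit.

```ts
import { Matrix, Scene } from "babylonjs";

// Sketch of the bookkeeping performed by _onApplyScreenBased each frame:
// the shader needs the inverse of the current view-projection and the
// view-projection of the previous frame.
class ViewProjectionHistory {
    private _previous = Matrix.Identity();
    private _inverse = Matrix.Identity();

    public update(scene: Scene): { inverseViewProjection: Matrix; prevViewProjection: Matrix } {
        // Same product as in the post-process above.
        const viewProjection = scene.getProjectionMatrix().multiply(scene.getViewMatrix());

        // Invert into a reusable matrix instead of allocating every frame.
        viewProjection.invertToRef(this._inverse);

        const uniforms = {
            inverseViewProjection: this._inverse,
            prevViewProjection: this._previous,
        };

        // Keep the current product for next frame's "prevViewProjection".
        this._previous = viewProjection;
        return uniforms;
    }
}
```
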
+ 62 - 27
src/Shaders/motionBlur.fragment.fx

@@ -2,40 +2,75 @@
 varying vec2 vUV;
 
 uniform sampler2D textureSampler;
-uniform sampler2D velocitySampler;
-
 uniform float motionStrength;
 uniform float motionScale;
 uniform vec2 screenSize;
 
+#ifdef OBJECT_BASED
+uniform sampler2D velocitySampler;
+#else
+uniform sampler2D depthSampler;
+
+uniform mat4 inverseViewProjection;
+uniform mat4 prevViewProjection;
+#endif
+
 void main(void)
 {
     #ifdef GEOMETRY_SUPPORTED
-    vec2 texelSize = 1.0 / screenSize;
-    vec2 velocityColor = texture2D(velocitySampler, vUV).rg * 2.0 - 1.0;
-	
-    vec2 velocity = vec2(pow(velocityColor.r, 3.0), pow(velocityColor.g, 3.0));
-	velocity *= motionScale * motionStrength;
-
-    float speed = length(velocity / texelSize);
-    int samplesCount = int(clamp(speed, 1.0, SAMPLES));
-
-    velocity = normalize(velocity) * texelSize;
-    float hlim = float(-samplesCount) * 0.5 + 0.5;
-
-    vec4 result = texture2D(textureSampler, vUV);
-
-    for (int i = 1; i < int(SAMPLES); ++i)
-    {
-        if (i >= samplesCount)
-            break;
-        
-        vec2 offset = vUV + velocity * (hlim + float(i));
-        result += texture2D(textureSampler, offset);
-    }
-
-	gl_FragColor = result / float(samplesCount);
-    gl_FragColor.a = 1.0;
+        #ifdef OBJECT_BASED
+            vec2 texelSize = 1.0 / screenSize;
+            vec2 velocityColor = texture2D(velocitySampler, vUV).rg * 2.0 - 1.0;
+            
+            vec2 velocity = vec2(pow(velocityColor.r, 3.0), pow(velocityColor.g, 3.0));
+            velocity *= motionScale * motionStrength;
+
+            float speed = length(velocity / texelSize);
+            int samplesCount = int(clamp(speed, 1.0, SAMPLES));
+
+            velocity = normalize(velocity) * texelSize;
+            float hlim = float(-samplesCount) * 0.5 + 0.5;
+
+            vec4 result = texture2D(textureSampler, vUV);
+
+            for (int i = 1; i < int(SAMPLES); ++i)
+            {
+                if (i >= samplesCount)
+                    break;
+                
+                vec2 offset = vUV + velocity * (hlim + float(i));
+                result += texture2D(textureSampler, offset);
+            }
+
+            gl_FragColor = result / float(samplesCount);
+            gl_FragColor.a = 1.0;
+        #else
+            vec2 texelSize = 1.0 / screenSize;
+            float depth = texture2D(depthSampler, vUV).r;
+
+            vec4 cpos = vec4(vUV * 2.0 - 1.0, depth, 1.0);
+            cpos = cpos * inverseViewProjection;
+
+            vec4 ppos = cpos * prevViewProjection;
+            ppos.xyz /= ppos.w;
+            ppos.xy = ppos.xy * 0.5 + 0.5;
+
+            vec2 velocity = (ppos.xy - vUV) * motionScale * motionStrength;
+            float speed = length(velocity / texelSize);
+            int nSamples = int(clamp(speed, 1.0, SAMPLES));
+
+            vec4 result = texture2D(textureSampler, vUV);
+
+            for (int i = 1; i < int(SAMPLES); ++i) {
+                if (i >= nSamples)
+                    break;
+                
+                vec2 offset1 = vUV + velocity * (float(i) / float(nSamples - 1) - 0.5);
+                result += texture2D(textureSampler, offset1);
+            }
+
+            gl_FragColor = result / float(nSamples);
+        #endif
     #else
     gl_FragColor = texture2D(textureSampler, vUV);
     #endif
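
To make the new `SCREEN_BASED` branch easier to follow, here is a CPU-side sketch of the reprojection it performs: reconstruct a clip-space position from the sampled depth, bring it back to world space with the inverse view-projection, reproject it with the previous frame's view-projection, and take the screen-space difference as the blur velocity. It uses Babylon's row-vector matrix helpers rather than the shader's own multiplication order, so treat it as an illustration of the idea, not a line-for-line port.

```ts
import { Matrix, Vector2, Vector3 } from "babylonjs";

// Per-pixel velocity estimate used by the screen based branch.
// uv is the pixel's texture coordinate in [0, 1], depth the value read
// from depthSampler for that pixel.
function screenSpaceVelocity(
    uv: Vector2,
    depth: number,
    inverseViewProjection: Matrix,
    prevViewProjection: Matrix
): Vector2 {
    // Clip-space position of the pixel in the current frame.
    const clip = new Vector3(uv.x * 2.0 - 1.0, uv.y * 2.0 - 1.0, depth);

    // Back to world space (TransformCoordinates performs the w divide).
    const world = Vector3.TransformCoordinates(clip, inverseViewProjection);

    // Where that point was on screen during the previous frame.
    const prev = Vector3.TransformCoordinates(world, prevViewProjection);
    const prevUV = new Vector2(prev.x * 0.5 + 0.5, prev.y * 0.5 + 0.5);

    // The shader scales this by motionScale * motionStrength before
    // stepping along it to accumulate samples.
    return prevUV.subtract(uv);
}
```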