
Merge branch 'master' into stop-animation

Alejandro Toledo 5 years ago
parent
commit
a025a0e501

+ 4 - 0
dist/preview release/what's new.md

@@ -186,6 +186,10 @@
 
 - Added support for custom word splitting function for `TextBlock` ([Popov72](https://github.com/Popov72))
 
+### Post Processes
+
+- SSAO 2 is now using the brand new `PrePassRenderer` to avoid rendering the scene twice ([CraigFeldspar](https://github.com/craigfeldspar))
+
 ## Bugs
 
 - Fix infinite loop in `GlowLayer.unReferenceMeshFromUsingItsOwnMaterial` ([Popov72](https://github.com/Popov72))
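
For reference, the SSAO 2 entry above maps to the new optional constructor parameter added further down in this diff. A minimal usage sketch, assuming the `@babylonjs/core` ES6 package and an existing scene and camera (function names are illustrative):

```ts
import { Camera, Scene, SSAO2RenderingPipeline } from "@babylonjs/core";

// Default path: the pipeline now renders through the PrePassRenderer,
// so the scene geometry is only drawn once.
function createSsao(scene: Scene, camera: Camera): SSAO2RenderingPipeline {
    return new SSAO2RenderingPipeline("ssao", scene, { ssaoRatio: 0.5, blurRatio: 1.0 }, [camera]);
}

// Opt-out path: the new trailing flag keeps the legacy GeometryBufferRenderer behaviour.
function createLegacySsao(scene: Scene, camera: Camera): SSAO2RenderingPipeline {
    return new SSAO2RenderingPipeline("ssaoLegacy", scene, 0.75, [camera], /* forceGeometryBuffer */ true);
}
```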

+ 20 - 0
src/Engines/constants.ts

@@ -484,4 +484,24 @@ export class Constants {
      * Detailed logging while loading
      */
     public static readonly SCENELOADER_DETAILED_LOGGING = 3;
+
+    /**
+     * Prepass texture index for color
+     */
+    public static readonly PREPASS_COLOR_INDEX = 0;
+
+    /**
+     * Prepass texture index for irradiance
+     */
+    public static readonly PREPASS_IRRADIANCE_INDEX = 1;
+
+    /**
+     * Prepass texture index for depth + normal
+     */
+    public static readonly PREPASS_DEPTHNORMAL_INDEX = 2;
+
+    /**
+     * Prepass texture index for albedo
+     */
+    public static readonly PREPASS_ALBEDO_INDEX = 3;
 }
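
These indices identify the attachments of the prepass MRT. A hedged sketch of reading one of them from a custom post process, assuming the prepass renderer is available and the post process's fragment shader declares a `depthNormalSampler` sampler (the helper name is an assumption):

```ts
import { Constants, Effect, PostProcess, Scene } from "@babylonjs/core";

// Bind the packed depth + view-space normal attachment of the prepass MRT to a post process.
function bindDepthNormal(scene: Scene, postProcess: PostProcess): void {
    const prePassRenderer = scene.enablePrePassRenderer();
    if (!prePassRenderer) {
        return; // prepass not supported by the current engine
    }

    postProcess.onApply = (effect: Effect) => {
        effect.setTexture("depthNormalSampler", prePassRenderer.prePassRT.textures[Constants.PREPASS_DEPTHNORMAL_INDEX]);
    };
}
```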

+ 5 - 1
src/Materials/PBR/pbrBaseMaterial.ts

@@ -679,7 +679,11 @@ export abstract class PBRBaseMaterial extends PushMaterial {
      * Should this material render to several textures at once
      */
     public get shouldRenderToMRT() {
-        return this.subSurface.isScatteringEnabled;
+        const ppr = this.getScene().prePassRenderer;
+
+        return (!!ppr &&
+            (ppr.materialsShouldRenderGeometry ||
+            ppr.materialsShouldRenderIrradiance));
     }
 
     /**

+ 12 - 0
src/Materials/standardMaterial.ts

@@ -677,6 +677,14 @@ export class StandardMaterial extends PushMaterial {
     }
 
     /**
+     * Should this material render to several textures at once
+     */
+    public get shouldRenderToMRT() {
+        const ppr = this.getScene().prePassRenderer;
+        return (!!ppr && ppr.materialsShouldRenderGeometry);
+    }
+
+    /**
      * Defines the detail map parameters for the material.
      */
     public readonly detailMap = new DetailMapConfiguration(this._markAllSubMeshesAsTexturesDirty.bind(this));
@@ -825,6 +833,9 @@ export class StandardMaterial extends PushMaterial {
         // Multiview
         MaterialHelper.PrepareDefinesForMultiview(scene, defines);
 
+        // PrePass
+        MaterialHelper.PrepareDefinesForPrePass(scene, defines, this.shouldRenderToMRT);
+
         // Textures
         if (defines._areTexturesDirty) {
             defines._needUVs = false;
@@ -1207,6 +1218,7 @@ export class StandardMaterial extends PushMaterial {
                 onError: this.onError,
                 indexParameters: { maxSimultaneousLights: this._maxSimultaneousLights, maxSimultaneousMorphTargets: defines.NUM_MORPH_INFLUENCERS },
                 processFinalCode: csnrOptions.processFinalCode,
+                multiTarget: this.shouldRenderToMRT
             }, engine);
 
             if (effect) {
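
The getter added above (and its PBR counterpart earlier in this diff) simply mirrors the prepass renderer's state. A hedged sketch of the same check written as a stand-alone helper, where the function name is an assumption and the flags are the ones introduced on `PrePassRenderer` below:

```ts
import { Scene } from "@babylonjs/core";

// Hypothetical helper: geometry should only be written to the MRT when a prepass
// renderer exists on the scene and currently requests geometry from materials.
function materialShouldRenderToMRT(scene: Scene): boolean {
    const prePassRenderer = scene.prePassRenderer;
    return !!prePassRenderer && prePassRenderer.materialsShouldRenderGeometry;
}
```

The same value is forwarded both to `MaterialHelper.PrepareDefinesForPrePass` and to the effect's `multiTarget` option, so the shader defines and the render target setup stay in sync.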

+ 61 - 21
src/PostProcesses/RenderPipeline/Pipelines/ssao2RenderingPipeline.ts

@@ -9,10 +9,11 @@ import { PostProcess } from "../../../PostProcesses/postProcess";
 import { PostProcessRenderPipeline } from "../../../PostProcesses/RenderPipeline/postProcessRenderPipeline";
 import { PostProcessRenderEffect } from "../../../PostProcesses/RenderPipeline/postProcessRenderEffect";
 import { PassPostProcess } from "../../../PostProcesses/passPostProcess";
-import { GeometryBufferRenderer } from "../../../Rendering/geometryBufferRenderer";
+import { PrePassRenderer } from "../../../Rendering/prePassRenderer";
 import { Scene } from "../../../scene";
 import { _TypeStore } from '../../../Misc/typeStore';
 import { EngineStore } from '../../../Engines/engineStore';
+import { Constants } from "../../../Engines/constants";
 
 import "../../../PostProcesses/RenderPipeline/postProcessRenderPipelineManagerSceneComponent";
 
@@ -75,8 +76,8 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
     * Number of samples used for the SSAO calculations. Default value is 8
     */
     public set samples(n: number) {
-        this._ssaoPostProcess.updateEffect("#define SAMPLES " + n + "\n#define SSAO");
         this._samples = n;
+        this._ssaoPostProcess.updateEffect(this._getDefinesForSSAO());
         this._sampleSphere = this._generateHemisphere();
     }
     public get samples(): number {
@@ -102,6 +103,11 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
     }
 
     /**
+     * Force rendering the geometry through the legacy geometry buffer renderer
+     */
+    private _forceGeometryBuffer: boolean = false;
+
+    /**
      * Ratio object used for SSAO ratio and blur ratio
      */
     @serialize()
@@ -124,9 +130,9 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
     */
     public set expensiveBlur(b: boolean) {
         this._blurHPostProcess.updateEffect("#define BILATERAL_BLUR\n#define BILATERAL_BLUR_H\n#define SAMPLES 16\n#define EXPENSIVE " + (b ? "1" : "0") + "\n",
-            null, ["textureSampler", "depthSampler"]);
+            null, ["textureSampler", "depthNormalSampler"]);
         this._blurVPostProcess.updateEffect("#define BILATERAL_BLUR\n#define SAMPLES 16\n#define EXPENSIVE " + (b ? "1" : "0") + "\n",
-            null, ["textureSampler", "depthSampler"]);
+            null, ["textureSampler", "depthNormalSampler"]);
         this._expensiveBlur = b;
     }
 
@@ -159,16 +165,15 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
     }
 
     private _scene: Scene;
-    private _depthTexture: Texture;
-    private _normalTexture: Texture;
     private _randomTexture: DynamicTexture;
-
     private _originalColorPostProcess: PassPostProcess;
     private _ssaoPostProcess: PostProcess;
     private _blurHPostProcess: PostProcess;
     private _blurVPostProcess: PostProcess;
     private _ssaoCombinePostProcess: PostProcess;
 
+    private _prePassRenderer: PrePassRenderer;
+
     /**
      * Gets active scene
      */
@@ -182,12 +187,14 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
      * @param scene The scene linked to this pipeline
      * @param ratio The size of the postprocesses. Can be a number shared between passes or an object for more precision: { ssaoRatio: 0.5, blurRatio: 1.0 }
      * @param cameras The array of cameras that the rendering pipeline will be attached to
+     * @param forceGeometryBuffer Set to true if you want to use the legacy geometry buffer renderer
      */
-    constructor(name: string, scene: Scene, ratio: any, cameras?: Camera[]) {
+    constructor(name: string, scene: Scene, ratio: any, cameras?: Camera[], forceGeometryBuffer = false) {
         super(scene.getEngine(), name);
 
         this._scene = scene;
         this._ratio = ratio;
+        this._forceGeometryBuffer = forceGeometryBuffer;
 
         if (!this.isSupported) {
             Logger.Error("SSAO 2 needs WebGL 2 support.");
@@ -198,10 +205,14 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
         var blurRatio = this._ratio.blurRatio || ratio;
 
         // Set up assets
-        let geometryBufferRenderer = <GeometryBufferRenderer>scene.enableGeometryBufferRenderer();
+        if (this._forceGeometryBuffer) {
+            scene.enableGeometryBufferRenderer();
+        } else {
+            this._prePassRenderer = <PrePassRenderer>scene.enablePrePassRenderer();
+            this._prePassRenderer.markAsDirty();
+        }
+
         this._createRandomTexture();
-        this._depthTexture = geometryBufferRenderer.getGBuffer().textures[0];
-        this._normalTexture = geometryBufferRenderer.getGBuffer().textures[1];
 
         this._originalColorPostProcess = new PassPostProcess("SSAOOriginalSceneColor", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false);
         this._originalColorPostProcess.samples = this.textureSamples;
@@ -268,7 +279,7 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
             this._samplerOffsets.push(i * 2 + 0.5);
         }
 
-        this._blurHPostProcess = new PostProcess("BlurH", "ssao2", ["outSize", "samplerOffsets", "near", "far", "radius"], ["depthSampler"], ssaoRatio, null, Texture.TRILINEAR_SAMPLINGMODE, this._scene.getEngine(), false, "#define BILATERAL_BLUR\n#define BILATERAL_BLUR_H\n#define SAMPLES 16\n#define EXPENSIVE " + (expensive ? "1" : "0") + "\n");
+        this._blurHPostProcess = new PostProcess("BlurH", "ssao2", ["outSize", "samplerOffsets", "near", "far", "radius"], ["depthNormalSampler"], ssaoRatio, null, Texture.TRILINEAR_SAMPLINGMODE, this._scene.getEngine(), false, "#define BILATERAL_BLUR\n#define BILATERAL_BLUR_H\n#define SAMPLES 16\n#define EXPENSIVE " + (expensive ? "1" : "0") + "\n");
         this._blurHPostProcess.onApply = (effect: Effect) => {
             if (!this._scene.activeCamera) {
                 return;
@@ -278,11 +289,15 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
             effect.setFloat("near", this._scene.activeCamera.minZ);
             effect.setFloat("far", this._scene.activeCamera.maxZ);
             effect.setFloat("radius", this.radius);
-            effect.setTexture("depthSampler", this._depthTexture);
+            if (this._forceGeometryBuffer) {
+                effect.setTexture("depthNormalSampler", this._scene.enableGeometryBufferRenderer()!.getGBuffer().textures[0]);
+            } else {
+                effect.setTexture("depthNormalSampler", this._prePassRenderer.prePassRT.textures[Constants.PREPASS_DEPTHNORMAL_INDEX]);
+            }
             effect.setArray("samplerOffsets", this._samplerOffsets);
         };
 
-        this._blurVPostProcess = new PostProcess("BlurV", "ssao2", ["outSize", "samplerOffsets", "near", "far", "radius"], ["depthSampler"], blurRatio, null, Texture.TRILINEAR_SAMPLINGMODE, this._scene.getEngine(), false, "#define BILATERAL_BLUR\n#define BILATERAL_BLUR_V\n#define SAMPLES 16\n#define EXPENSIVE " + (expensive ? "1" : "0") + "\n");
+        this._blurVPostProcess = new PostProcess("BlurV", "ssao2", ["outSize", "samplerOffsets", "near", "far", "radius"], ["depthNormalSampler"], blurRatio, null, Texture.TRILINEAR_SAMPLINGMODE, this._scene.getEngine(), false, "#define BILATERAL_BLUR\n#define BILATERAL_BLUR_V\n#define SAMPLES 16\n#define EXPENSIVE " + (expensive ? "1" : "0") + "\n");
         this._blurVPostProcess.onApply = (effect: Effect) => {
             if (!this._scene.activeCamera) {
                 return;
@@ -292,7 +307,11 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
             effect.setFloat("near", this._scene.activeCamera.minZ);
             effect.setFloat("far", this._scene.activeCamera.maxZ);
             effect.setFloat("radius", this.radius);
-            effect.setTexture("depthSampler", this._depthTexture);
+            if (this._forceGeometryBuffer) {
+                effect.setTexture("depthNormalSampler", this._scene.enableGeometryBufferRenderer()!.getGBuffer().textures[0]);
+            } else {
+                effect.setTexture("depthNormalSampler", this._prePassRenderer.prePassRT.textures[Constants.PREPASS_DEPTHNORMAL_INDEX]);
+            }
             effect.setArray("samplerOffsets", this._samplerOffsets);
 
         };
@@ -352,21 +371,38 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
         return result;
     }
 
-    private _createSSAOPostProcess(ratio: number): void {
-        var numSamples = this.samples;
+    private _getDefinesForSSAO() {
+        let defines = "#define SAMPLES " + this.samples + "\n#define SSAO";
+
+        if (this._forceGeometryBuffer) {
+            defines = defines + "\n#define GEOMETRYBUFFER";
+        }
 
+        return defines;
+    }
+
+    private _createSSAOPostProcess(ratio: number): void {
         this._sampleSphere = this._generateHemisphere();
 
+        const defines = this._getDefinesForSSAO();
+        let samplers;
+
+        if (this._forceGeometryBuffer) {
+            samplers = ["randomSampler", "depthSampler", "normalSampler"];
+        } else {
+            samplers = ["randomSampler", "depthNormalSampler"];
+        }
+
         this._ssaoPostProcess = new PostProcess("ssao2", "ssao2",
             [
                 "sampleSphere", "samplesFactor", "randTextureTiles", "totalStrength", "radius",
                 "base", "range", "projection", "near", "far", "texelSize",
                 "xViewport", "yViewport", "maxZ", "minZAspect"
             ],
-            ["randomSampler", "normalSampler"],
+            samplers,
             ratio, null, Texture.BILINEAR_SAMPLINGMODE,
             this._scene.getEngine(), false,
-            "#define SAMPLES " + numSamples + "\n#define SSAO");
+            defines);
 
         this._ssaoPostProcess.onApply = (effect: Effect) => {
             if (!this._scene.activeCamera) {
@@ -388,8 +424,12 @@ export class SSAO2RenderingPipeline extends PostProcessRenderPipeline {
             effect.setFloat("yViewport", Math.tan(this._scene.activeCamera.fov / 2));
             effect.setMatrix("projection", this._scene.getProjectionMatrix());
 
-            effect.setTexture("textureSampler", this._depthTexture);
-            effect.setTexture("normalSampler", this._normalTexture);
+            if (this._forceGeometryBuffer) {
+                effect.setTexture("depthSampler", this._scene.enableGeometryBufferRenderer()!.getGBuffer().textures[0]);
+                effect.setTexture("normalSampler", this._scene.enableGeometryBufferRenderer()!.getGBuffer().textures[1]);
+            } else {
+                effect.setTexture("depthNormalSampler", this._prePassRenderer.prePassRT.textures[Constants.PREPASS_DEPTHNORMAL_INDEX]);
+            }
             effect.setTexture("randomSampler", this._randomTexture);
         };
         this._ssaoPostProcess.samples = this.textureSamples;

+ 9 - 1
src/PostProcesses/postProcess.ts

@@ -163,7 +163,7 @@ export class PostProcess {
     protected _indexParameters: any;
     private _shareOutputWithPostProcess: Nullable<PostProcess>;
     private _texelSize = Vector2.Zero();
-    private _forcedOutputTexture: InternalTexture;
+    private _forcedOutputTexture: Nullable<InternalTexture>;
 
     /**
      * Returns the fragment url or shader name used in the post process.
@@ -270,6 +270,14 @@ export class PostProcess {
     }
 
     /**
+    * Since `inputTexture` must always stay defined, a manually assigned `inputTexture` cannot simply be unset;
+    * call this function instead to restore the post process's default internal texture
+    */
+    public restoreDefaultInputTexture() {
+        this._forcedOutputTexture = null;
+    }
+
+    /**
     * Gets the camera which post process is applied to.
     * @returns The camera the post process is applied to.
     */
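
A short usage sketch of the new helper; the `sharedTarget` texture and the function name are assumptions, while `inputTexture` is the existing setter this complements:

```ts
import { InternalTexture, PostProcess } from "@babylonjs/core";

function redirectThenRestore(postProcess: PostProcess, sharedTarget: InternalTexture): void {
    // Temporarily force the post process to read from an externally managed texture.
    postProcess.inputTexture = sharedTarget;

    // Later, drop the override so the post process falls back to its own internal texture.
    postProcess.restoreDefaultInputTexture();
}
```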

+ 23 - 0
src/Rendering/prePassEffectConfiguration.ts

@@ -0,0 +1,23 @@
+import { PostProcess } from "../PostProcesses/postProcess";
+
+/**
+ * Interface for defining prepass effects in the prepass post-process pipeline
+ */
+export interface PrePassEffectConfiguration {
+    /**
+     * Post process to attach for this effect
+     */
+    postProcess: PostProcess;
+    /**
+     * Is the effect enabled
+     */
+    enabled: boolean;
+    /**
+     * Disposes the effect configuration
+     */
+    dispose(): void;
+    /**
+     * Creates the post process attached to this effect
+     */
+    createPostProcess: () => PostProcess;
+}
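
A hypothetical implementation of this interface might look like the sketch below; the effect name, shader, constructor arguments, and the deep import path are assumptions, and `SubSurfaceConfiguration` later in this diff is the real implementation:

```ts
import { PostProcess, Scene } from "@babylonjs/core";
import { PrePassEffectConfiguration } from "@babylonjs/core/Rendering/prePassEffectConfiguration";

// Hypothetical prepass effect: "myEffect" would have to ship its own fragment shader.
class MyEffectConfiguration implements PrePassEffectConfiguration {
    public postProcess: PostProcess;
    public enabled = false;

    constructor(private _scene: Scene) {}

    public createPostProcess(): PostProcess {
        this.postProcess = new PostProcess("myEffect", "myEffect", [], [], 1, null, undefined, this._scene.getEngine());
        this.postProcess.autoClear = false;
        return this.postProcess;
    }

    public dispose(): void {
        if (this.postProcess) {
            this.postProcess.dispose();
        }
    }
}
```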

+ 81 - 24
src/Rendering/prePassRenderer.ts

@@ -4,11 +4,12 @@ import { Scene } from "../scene";
 import { Engine } from "../Engines/engine";
 import { Constants } from "../Engines/constants";
 import { ImageProcessingPostProcess } from "../PostProcesses/imageProcessingPostProcess";
-import { SubSurfaceScatteringPostProcess } from "../PostProcesses/subSurfaceScatteringPostProcess";
+import { PostProcess } from "../PostProcesses/postProcess";
 import { Effect } from "../Materials/effect";
 import { _DevTools } from '../Misc/devTools';
 import { Color4 } from "../Maths/math.color";
 import { SubSurfaceConfiguration } from "./subSurfaceConfiguration";
+import { SSAO2RenderingPipeline } from "../PostProcesses/RenderPipeline/Pipelines/ssao2RenderingPipeline";
 
 /**
  * Renders a pre pass of the scene
@@ -45,6 +46,8 @@ export class PrePassRenderer {
     private _defaultAttachments: number[];
     private _clearAttachments: number[];
 
+    private _postProcesses: PostProcess[] = [];
+
     private readonly _clearColor = new Color4(0, 0, 0, 0);
 
     /**
@@ -53,14 +56,19 @@ export class PrePassRenderer {
     public imageProcessingPostProcess: ImageProcessingPostProcess;
 
     /**
-     * Post process for subsurface scattering
+     * Configuration for sub surface scattering post process
      */
-    public subSurfaceScatteringPostProcess: SubSurfaceScatteringPostProcess;
+    public subSurfaceConfiguration: SubSurfaceConfiguration;
 
     /**
-     * Configuration for sub surface scattering post process
+     * Should materials render their geometry on the MRT
      */
-    public subSurfaceConfiguration: SubSurfaceConfiguration;
+    public materialsShouldRenderGeometry: boolean = false;
+
+    /**
+     * Should materials render the irradiance information on the MRT
+     */
+    public materialsShouldRenderIrradiance: boolean = false;
 
     private _enabled: boolean = false;
 
@@ -79,8 +87,8 @@ export class PrePassRenderer {
     }
 
     public set samples(n: number) {
-        if (!this.subSurfaceScatteringPostProcess) {
-            this._createEffects();
+        if (!this.imageProcessingPostProcess) {
+            this._createCompositionEffect();
         }
 
         this.prePassRT.samples = n;
@@ -96,7 +104,7 @@ export class PrePassRenderer {
 
         PrePassRenderer._SceneComponentInitialization(this._scene);
 
-        this.subSurfaceConfiguration = new SubSurfaceConfiguration();
+        this.subSurfaceConfiguration = new SubSurfaceConfiguration(this._scene);
     }
 
     private _initializeAttachments() {
@@ -116,7 +124,7 @@ export class PrePassRenderer {
         }
     }
 
-    private _createEffects() {
+    private _createCompositionEffect() {
         this.prePassRT = new MultiRenderTarget("sceneprePassRT", { width: this._engine.getRenderWidth(), height: this._engine.getRenderHeight() }, this.mrtCount, this._scene,
             { generateMipMaps: false, generateDepthTexture: true, defaultType: Constants.TEXTURETYPE_UNSIGNED_INT, types: this._mrtTypes });
         this.prePassRT.samples = 1;
@@ -124,9 +132,7 @@ export class PrePassRenderer {
         this._initializeAttachments();
 
         this.imageProcessingPostProcess = new ImageProcessingPostProcess("sceneCompositionPass", 1, null, undefined, this._engine);
-        this.subSurfaceScatteringPostProcess = new SubSurfaceScatteringPostProcess("subSurfaceScattering", this._scene, 1, null, undefined, this._engine);
-        this.subSurfaceScatteringPostProcess.inputTexture = this.prePassRT.getInternalTexture()!;
-        this.subSurfaceScatteringPostProcess.autoClear = false;
+        this.imageProcessingPostProcess.autoClear = false;
     }
 
     /**
@@ -166,10 +172,11 @@ export class PrePassRenderer {
      */
     public _afterCameraDraw() {
         if (this._enabled) {
-            this.subSurfaceScatteringPostProcess.activate(this._scene.activeCamera);
-            this.imageProcessingPostProcess.activate(this._scene.activeCamera);
-            this._scene.postProcessManager.directRender([this.subSurfaceScatteringPostProcess], this.imageProcessingPostProcess.inputTexture);
-            this._scene.postProcessManager.directRender([this.imageProcessingPostProcess], null, false, 0, 0, false);
+            const firstCameraPP = this._scene.activeCamera && this._scene.activeCamera._getFirstPostProcess();
+            if (firstCameraPP) {
+                this._scene.postProcessManager._prepareFrame();
+            }
+            this._scene.postProcessManager.directRender(this._postProcesses, firstCameraPP ? firstCameraPP.inputTexture : null);
         }
     }
 
@@ -181,7 +188,8 @@ export class PrePassRenderer {
 
         if (width !== requiredWidth || height !== requiredHeight) {
             this.prePassRT.resize({ width: requiredWidth, height: requiredHeight });
-            this.subSurfaceScatteringPostProcess.inputTexture = this.prePassRT.getInternalTexture()!;
+
+            this._bindPostProcessChain();
         }
     }
 
@@ -218,19 +226,52 @@ export class PrePassRenderer {
     private _setState(enabled: boolean) {
         this._enabled = enabled;
         this._scene.prePass = enabled;
-        this.imageProcessingPostProcess.imageProcessingConfiguration.applyByPostProcess = enabled;
+
+        if (this.imageProcessingPostProcess) {
+            this.imageProcessingPostProcess.imageProcessingConfiguration.applyByPostProcess = enabled;
+        }
     }
 
     private _enable() {
-        if (!this.subSurfaceScatteringPostProcess) {
-            this._createEffects();
+        this._resetPostProcessChain();
+
+        if (this.subSurfaceConfiguration.enabled) {
+            if (!this.subSurfaceConfiguration.postProcess) {
+                this.subSurfaceConfiguration.createPostProcess();
+            }
+
+            this._postProcesses.push(this.subSurfaceConfiguration.postProcess);
         }
 
+        if (!this.imageProcessingPostProcess) {
+            this._createCompositionEffect();
+        }
+
+        this._postProcesses.push(this.imageProcessingPostProcess);
+        this._bindPostProcessChain();
         this._setState(true);
     }
 
     private _disable() {
         this._setState(false);
+        this.subSurfaceConfiguration.enabled = false;
+        this.materialsShouldRenderGeometry = false;
+        this.materialsShouldRenderIrradiance = false;
+    }
+
+    private _resetPostProcessChain() {
+        this._postProcesses = [];
+        if (this.imageProcessingPostProcess) {
+            this.imageProcessingPostProcess.restoreDefaultInputTexture();
+        }
+
+        if (this.subSurfaceConfiguration.postProcess) {
+            this.subSurfaceConfiguration.postProcess.restoreDefaultInputTexture();
+        }
+    }
+
+    private _bindPostProcessChain() {
+        this._postProcesses[0].inputTexture = this.prePassRT.getInternalTexture()!;
     }
 
     /**
@@ -242,20 +283,37 @@ export class PrePassRenderer {
 
     private _update() {
         this._disable();
+        let enablePrePass = false;
 
         // Subsurface scattering
         for (let i = 0; i < this._scene.materials.length; i++) {
             const material = this._scene.materials[i] as PBRBaseMaterial;
 
             if (material.subSurface && material.subSurface.isScatteringEnabled) {
-                this._enable();
+                this.subSurfaceConfiguration.enabled = true;
+                this.materialsShouldRenderIrradiance = true;
+                enablePrePass = true;
+
+                // 1 subsurface material is enough to activate post process
+                break;
             }
         }
 
-        // add SSAO 2 etc..
+        const pipelines = this._scene.postProcessRenderPipelineManager.supportedPipelines;
+        for (let i = 0; i < pipelines.length; i++) {
+            if (pipelines[i] instanceof SSAO2RenderingPipeline) {
+                this.materialsShouldRenderGeometry = true;
+                enablePrePass = true;
+                break;
+            }
+        }
 
         this._isDirty = false;
 
+        if (enablePrePass) {
+            this._enable();
+        }
+
         if (!this.enabled) {
             this._engine.bindAttachments(this._defaultAttachments);
         }
@@ -266,9 +324,8 @@ export class PrePassRenderer {
      */
     public dispose() {
         this.imageProcessingPostProcess.dispose();
-        this.subSurfaceScatteringPostProcess.dispose();
-        this.prePassRT.dispose();
         this.subSurfaceConfiguration.dispose();
+        this.prePassRT.dispose();
     }
 
 }
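
With this change the renderer is no longer toggled directly: `_update` enables it when a scattering material or an SSAO 2 pipeline is found. A minimal sketch of the subsurface path that triggers it, assuming an existing scene (mesh and material names are illustrative):

```ts
import { MeshBuilder, PBRMaterial, Scene } from "@babylonjs/core";

function enableScattering(scene: Scene): void {
    const material = new PBRMaterial("skin", scene);
    // This is the flag _update looks for; one scattering material is enough
    // to turn the prepass (and its subsurface post process) on.
    material.subSurface.isScatteringEnabled = true;

    const sphere = MeshBuilder.CreateSphere("sphere", { diameter: 1 }, scene);
    sphere.material = material;
}
```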

+ 30 - 3
src/Rendering/subSurfaceConfiguration.ts

@@ -1,16 +1,23 @@
 import { Logger } from "../Misc/logger";
+import { Scene } from "../scene";
 import { Color3 } from "../Maths/math.color";
-
+import { SubSurfaceScatteringPostProcess } from "../PostProcesses/subSurfaceScatteringPostProcess";
+import { PrePassEffectConfiguration } from "./prePassEffectConfiguration";
 /**
  * Contains all parameters needed for the prepass to perform
  * screen space subsurface scattering
  */
-export class SubSurfaceConfiguration {
+export class SubSurfaceConfiguration implements PrePassEffectConfiguration {
     private _ssDiffusionS: number[] = [];
     private _ssFilterRadii: number[] = [];
     private _ssDiffusionD: number[] = [];
 
     /**
+     * Post process to attach for screen space subsurface scattering
+     */
+    public postProcess: SubSurfaceScatteringPostProcess;
+
+    /**
      * Diffusion profile color for subsurface scattering
      */
     public get ssDiffusionS() {
@@ -32,6 +39,11 @@ export class SubSurfaceConfiguration {
     }
 
     /**
+     * Is subsurface enabled
+     */
+    public enabled = false;
+
+    /**
      * Diffusion profile colors for subsurface scattering
      * You can add one diffusion color using `addDiffusionProfile` on `scene.prePassRenderer`
      * See ...
@@ -45,13 +57,16 @@ export class SubSurfaceConfiguration {
      */
     public metersPerUnit: number = 1;
 
+    private _scene: Scene;
+
     /**
      * Builds a subsurface configuration object
      * @param scene The scene
      */
-    constructor() {
+    constructor(scene: Scene) {
         // Adding default diffusion profile
         this.addDiffusionProfile(new Color3(1, 1, 1));
+        this._scene = scene;
     }
 
     /**
@@ -85,6 +100,17 @@ export class SubSurfaceConfiguration {
     }
 
     /**
+     * Creates the subsurface scattering post process
+     * @return The created post process
+     */
+    public createPostProcess() : SubSurfaceScatteringPostProcess {
+        this.postProcess = new SubSurfaceScatteringPostProcess("subSurfaceScattering", this._scene, 1, null, undefined, this._scene.getEngine());
+        this.postProcess.autoClear = false;
+
+        return this.postProcess;
+    }
+
+    /**
      * Deletes all diffusion profiles.
      * Note that in order to render subsurface scattering, you should have at least 1 diffusion profile.
      */
@@ -100,6 +126,7 @@ export class SubSurfaceConfiguration {
      */
     public dispose() {
         this.clearAllDiffusionProfiles();
+        this.postProcess.dispose();
     }
 
     /**
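
A hedged usage sketch for the configuration itself, assuming the prepass renderer has been enabled on the scene; `metersPerUnit` and `addDiffusionProfile` are the members shown above, and the values are illustrative:

```ts
import { Color3, Scene } from "@babylonjs/core";

function configureSubSurface(scene: Scene): void {
    const prePassRenderer = scene.enablePrePassRenderer();
    if (!prePassRenderer) {
        return;
    }

    const config = prePassRenderer.subSurfaceConfiguration;
    config.metersPerUnit = 0.01;                           // scene units are centimeters
    config.addDiffusionProfile(new Color3(1.0, 0.5, 0.3)); // an extra, skin-like diffusion color
}
```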

+ 12 - 0
src/Shaders/default.fragment.fx

@@ -4,6 +4,8 @@
 #extension GL_OES_standard_derivatives : enable
 #endif
 
+#include<prePassDeclaration>[SCENE_MRT_COUNT]
+
 #define CUSTOM_FRAGMENT_BEGIN
 
 #ifdef LOGARITHMICDEPTH
@@ -35,6 +37,10 @@ varying vec4 vColor;
 	varying vec2 vMainUV2;
 #endif
 
+#ifdef PREPASS
+	varying vec3 vViewPos;
+#endif
+
 // Helper functions
 #include<helperFunctions>
 
@@ -474,6 +480,12 @@ color.rgb = max(color.rgb, 0.);
 #endif
 
 #define CUSTOM_FRAGMENT_BEFORE_FRAGCOLOR
+#ifdef PREPASS
+    gl_FragData[0] = color; // Lit without irradiance
+    gl_FragData[1] = vec4(0.0, 0.0, 0.0, 1.0); // Irradiance
+    gl_FragData[2] = vec4(vViewPos.z, (view * vec4(normalW, 0.0)).rgb); // Linear depth + normal
+    gl_FragData[3] = vec4(0.0, 0.0, 0.0, 1.0); // albedo, for pre and post scatter
+#endif
 	gl_FragColor = color;
 
 }

+ 7 - 0
src/Shaders/default.vertex.fx

@@ -27,6 +27,10 @@ attribute vec4 color;
 // Uniforms
 #include<instancesDeclaration>
 
+#ifdef PREPASS
+varying vec3 vViewPos;
+#endif
+
 #ifdef MAINUV1
 	varying vec2 vMainUV1;
 #endif
@@ -156,6 +160,9 @@ void main(void) {
 #endif	
 
 	vPositionW = vec3(worldPos);
+	#ifdef PREPASS
+	    vViewPos = (view * worldPos).rgb;
+	#endif
 
 #if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED)
 	vDirectionW = normalize(vec3(finalWorld * vec4(positionUpdated, 0.0)));

+ 4 - 3
src/Shaders/pbr.fragment.fx

@@ -510,14 +510,15 @@ void main(void) {
     vec3 sqAlbedo = sqrt(surfaceAlbedo); // for pre and post scatter
 
     // Irradiance is diffuse * surfaceAlbedo
+    #ifdef SS_SCATTERING
     gl_FragData[0] = vec4(finalColor.rgb - irradiance, finalColor.a); // Lit without irradiance
     irradiance /= sqAlbedo;
-    #ifdef SS_SCATTERING
     gl_FragData[1] = vec4(tagLightingForSSS(irradiance), scatteringDiffusionProfile / 255.); // Irradiance + SS diffusion profile
     #else
-    gl_FragData[1] = vec4(irradiance, 1.0); // Irradiance
+    gl_FragData[0] = vec4(finalColor.rgb, finalColor.a); // Lit without irradiance
+    gl_FragData[1] = vec4(0.0, 0.0, 0.0, 1.0); // Irradiance
     #endif
-    gl_FragData[2] = vec4(vViewPos.z, 0.0, 0.0, 1.0); // Linear depth
+    gl_FragData[2] = vec4(vViewPos.z, (view * vec4(normalW, 0.0)).rgb); // Linear depth + normal
     gl_FragData[3] = vec4(sqAlbedo, 1.0); // albedo, for pre and post scatter
 #endif
 

+ 194 - 176
src/Shaders/ssao2.fragment.fx

@@ -39,195 +39,213 @@ float viewZToOrthographicDepth( const in float viewZ, const in float near, const
 }
 
 #ifdef SSAO
-uniform sampler2D randomSampler;
-uniform sampler2D normalSampler;
-
-uniform float randTextureTiles;
-uniform float samplesFactor;
-uniform vec3 sampleSphere[SAMPLES];
-
-uniform float totalStrength;
-uniform float base;
-uniform float xViewport;
-uniform float yViewport;
-uniform float maxZ;
-uniform float minZAspect;
-uniform vec2 texelSize;
-
-uniform mat4 projection;
-
-void main()
-{
-	vec3 random = texture2D(randomSampler, vUV * randTextureTiles).rgb;
-	float depth = texture2D(textureSampler, vUV).r;
-	float depthSign = depth / abs(depth);
-	depth = depth * depthSign;
-	vec3 normal = texture2D(normalSampler, vUV).rgb; 
-	float occlusion = 0.0;
-	float correctedRadius = min(radius, minZAspect * depth / near);
-
-	vec3 vViewRay = vec3((vUV.x * 2.0 - 1.0)*xViewport, (vUV.y * 2.0 - 1.0)*yViewport, depthSign);
-	vec3 origin = vViewRay * depth;
-	vec3 rvec = random * 2.0 - 1.0;
-	rvec.z = 0.0;
-
-	// Avoid numerical precision issue while applying Gram-Schmidt
-	float dotProduct = dot(rvec, normal);
-	rvec = 1.0 - abs(dotProduct) > 1e-2 ? rvec : vec3(-rvec.y, 0.0, rvec.x);
-	vec3 tangent = normalize(rvec - normal * dot(rvec, normal));
-	vec3 bitangent = cross(normal, tangent);
-	mat3 tbn = mat3(tangent, bitangent, normal);
-
-	float difference;
-
-	for (int i = 0; i < SAMPLES; ++i) {
-		// get sample position:
-	   vec3 samplePosition = scales[(i + int(random.x * 16.0)) % 16] * tbn * sampleSphere[(i + int(random.y * 16.0)) % 16];
-	   samplePosition = samplePosition * correctedRadius + origin;
-	  
-		// project sample position:
-	   vec4 offset = vec4(samplePosition, 1.0);
-	   offset = projection * offset;
-	   offset.xyz /= offset.w;
-	   offset.xy = offset.xy * 0.5 + 0.5;
-
-	   if (offset.x < 0.0 || offset.y < 0.0 || offset.x > 1.0 || offset.y > 1.0) {
-	     continue;
-	   }
-	  
-		// get sample linearDepth:
-	   float sampleDepth = abs(texture2D(textureSampler, offset.xy).r);
-		// range check & accumulate:
-	   difference = depthSign * samplePosition.z - sampleDepth;
-	   float rangeCheck = 1.0 - smoothstep(correctedRadius*0.5, correctedRadius, difference);
-	   occlusion += (difference >= 0.0 ? 1.0 : 0.0) * rangeCheck;
+	uniform sampler2D randomSampler;
+
+	#ifndef GEOMETRYBUFFER
+		uniform sampler2D depthNormalSampler;
+	#else
+		uniform sampler2D depthSampler;
+		uniform sampler2D normalSampler;
+	#endif
+
+	uniform float randTextureTiles;
+	uniform float samplesFactor;
+	uniform vec3 sampleSphere[SAMPLES];
+
+	uniform float totalStrength;
+	uniform float base;
+	uniform float xViewport;
+	uniform float yViewport;
+	uniform float maxZ;
+	uniform float minZAspect;
+	uniform vec2 texelSize;
+
+	uniform mat4 projection;
+
+	void main()
+	{
+		vec3 random = texture2D(randomSampler, vUV * randTextureTiles).rgb;
+		#ifndef GEOMETRYBUFFER
+			float depth = texture2D(depthNormalSampler, vUV).r;
+		#else 
+			float depth = texture2D(depthSampler, vUV).r;
+		#endif
+		float depthSign = depth / abs(depth);
+		depth = depth * depthSign;
+		#ifndef GEOMETRYBUFFER
+			vec3 normal = texture2D(depthNormalSampler, vUV).gba; 
+		#else 
+			vec3 normal = texture2D(normalSampler, vUV).rgb;
+		#endif
+		float occlusion = 0.0;
+		float correctedRadius = min(radius, minZAspect * depth / near);
+
+		vec3 vViewRay = vec3((vUV.x * 2.0 - 1.0)*xViewport, (vUV.y * 2.0 - 1.0)*yViewport, depthSign);
+		vec3 origin = vViewRay * depth;
+		vec3 rvec = random * 2.0 - 1.0;
+		rvec.z = 0.0;
+
+		// Avoid numerical precision issue while applying Gram-Schmidt
+		float dotProduct = dot(rvec, normal);
+		rvec = 1.0 - abs(dotProduct) > 1e-2 ? rvec : vec3(-rvec.y, 0.0, rvec.x);
+		vec3 tangent = normalize(rvec - normal * dot(rvec, normal));
+		vec3 bitangent = cross(normal, tangent);
+		mat3 tbn = mat3(tangent, bitangent, normal);
+
+		float difference;
+
+		for (int i = 0; i < SAMPLES; ++i) {
+			// get sample position:
+		    vec3 samplePosition = scales[(i + int(random.x * 16.0)) % 16] * tbn * sampleSphere[(i + int(random.y * 16.0)) % 16];
+		    samplePosition = samplePosition * correctedRadius + origin;
+		  
+			// project sample position:
+		    vec4 offset = vec4(samplePosition, 1.0);
+		    offset = projection * offset;
+		    offset.xyz /= offset.w;
+		    offset.xy = offset.xy * 0.5 + 0.5;
+
+		    if (offset.x < 0.0 || offset.y < 0.0 || offset.x > 1.0 || offset.y > 1.0) {
+		        continue;
+		    }
+		  
+			// get sample linearDepth:
+			#ifndef GEOMETRYBUFFER
+		    	float sampleDepth = abs(texture2D(depthNormalSampler, offset.xy).r);
+		    #else
+		    	float sampleDepth = abs(texture2D(depthSampler, offset.xy).r);
+		    #endif
+			// range check & accumulate:
+		    difference = depthSign * samplePosition.z - sampleDepth;
+		    float rangeCheck = 1.0 - smoothstep(correctedRadius*0.5, correctedRadius, difference);
+		    occlusion += (difference >= 0.0 ? 1.0 : 0.0) * rangeCheck;
+		}
+		occlusion = occlusion*(1.0 - smoothstep(maxZ * 0.75, maxZ, depth));
+		float ao = 1.0 - totalStrength * occlusion * samplesFactor;
+		float result = clamp(ao + base, 0.0, 1.0);
+		gl_FragColor = vec4(vec3(result), 1.0);
 	}
-	occlusion = occlusion*(1.0 - smoothstep(maxZ * 0.75, maxZ, depth));
-	float ao = 1.0 - totalStrength * occlusion * samplesFactor;
-	float result = clamp(ao + base, 0.0, 1.0);
-	gl_FragColor = vec4(vec3(result), 1.0);
-}
 #endif
 
 #ifdef BILATERAL_BLUR
-uniform sampler2D depthSampler;
-uniform float outSize;
-uniform float samplerOffsets[SAMPLES];
-
-vec4 blur9(sampler2D image, vec2 uv, float resolution, vec2 direction) {
-  vec4 color = vec4(0.0);
-  vec2 off1 = vec2(1.3846153846) * direction;
-  vec2 off2 = vec2(3.2307692308) * direction;
-  color += texture2D(image, uv) * 0.2270270270;
-  color += texture2D(image, uv + (off1 / resolution)) * 0.3162162162;
-  color += texture2D(image, uv - (off1 / resolution)) * 0.3162162162;
-  color += texture2D(image, uv + (off2 / resolution)) * 0.0702702703;
-  color += texture2D(image, uv - (off2 / resolution)) * 0.0702702703;
-  return color;
-}
-
-vec4 blur13(sampler2D image, vec2 uv, float resolution, vec2 direction) {
-  vec4 color = vec4(0.0);
-  vec2 off1 = vec2(1.411764705882353) * direction;
-  vec2 off2 = vec2(3.2941176470588234) * direction;
-  vec2 off3 = vec2(5.176470588235294) * direction;
-  color += texture2D(image, uv) * 0.1964825501511404;
-  color += texture2D(image, uv + (off1 / resolution)) * 0.2969069646728344;
-  color += texture2D(image, uv - (off1 / resolution)) * 0.2969069646728344;
-  color += texture2D(image, uv + (off2 / resolution)) * 0.09447039785044732;
-  color += texture2D(image, uv - (off2 / resolution)) * 0.09447039785044732;
-  color += texture2D(image, uv + (off3 / resolution)) * 0.010381362401148057;
-  color += texture2D(image, uv - (off3 / resolution)) * 0.010381362401148057;
-  return color;
-}
+	uniform sampler2D depthNormalSampler;
+	uniform float outSize;
+	uniform float samplerOffsets[SAMPLES];
+
+	vec4 blur9(sampler2D image, vec2 uv, float resolution, vec2 direction) {
+	  vec4 color = vec4(0.0);
+	  vec2 off1 = vec2(1.3846153846) * direction;
+	  vec2 off2 = vec2(3.2307692308) * direction;
+	  color += texture2D(image, uv) * 0.2270270270;
+	  color += texture2D(image, uv + (off1 / resolution)) * 0.3162162162;
+	  color += texture2D(image, uv - (off1 / resolution)) * 0.3162162162;
+	  color += texture2D(image, uv + (off2 / resolution)) * 0.0702702703;
+	  color += texture2D(image, uv - (off2 / resolution)) * 0.0702702703;
+	  return color;
+	}
 
-vec4 blur13Bilateral(sampler2D image, vec2 uv, float resolution, vec2 direction) {
-  vec4 color = vec4(0.0);
-  vec2 off1 = vec2(1.411764705882353) * direction;
-  vec2 off2 = vec2(3.2941176470588234) * direction;
-  vec2 off3 = vec2(5.176470588235294) * direction;
-
-  float compareDepth = abs(texture2D(depthSampler, uv).r);
-  float sampleDepth;
-  float weight;
-  float weightSum = 30.0;
-
-  color += texture2D(image, uv) * 30.0;
-
-  sampleDepth = abs(texture2D(depthSampler, uv + (off1 / resolution)).r);
-  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
-  weightSum +=  weight;
-  color += texture2D(image, uv + (off1 / resolution)) * weight;
-
-  sampleDepth = abs(texture2D(depthSampler, uv - (off1 / resolution)).r);
-  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
-  weightSum +=  weight;
-  color += texture2D(image, uv - (off1 / resolution)) * weight;
-
-  sampleDepth = abs(texture2D(depthSampler, uv + (off2 / resolution)).r);
-  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
-  weightSum += weight;
-  color += texture2D(image, uv + (off2 / resolution)) * weight;
-
-  sampleDepth = abs(texture2D(depthSampler, uv - (off2 / resolution)).r);
-  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
-  weightSum += weight;
-  color += texture2D(image, uv - (off2 / resolution)) * weight;
-
-  sampleDepth = abs(texture2D(depthSampler, uv + (off3 / resolution)).r);
-  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
-  weightSum += weight;
-  color += texture2D(image, uv + (off3 / resolution)) * weight;
-
-  sampleDepth = abs(texture2D(depthSampler, uv - (off3 / resolution)).r);
-  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
-  weightSum += weight;
-  color += texture2D(image, uv - (off3 / resolution)) * weight;
-
-  return color / weightSum;
-}
+	vec4 blur13(sampler2D image, vec2 uv, float resolution, vec2 direction) {
+	  vec4 color = vec4(0.0);
+	  vec2 off1 = vec2(1.411764705882353) * direction;
+	  vec2 off2 = vec2(3.2941176470588234) * direction;
+	  vec2 off3 = vec2(5.176470588235294) * direction;
+	  color += texture2D(image, uv) * 0.1964825501511404;
+	  color += texture2D(image, uv + (off1 / resolution)) * 0.2969069646728344;
+	  color += texture2D(image, uv - (off1 / resolution)) * 0.2969069646728344;
+	  color += texture2D(image, uv + (off2 / resolution)) * 0.09447039785044732;
+	  color += texture2D(image, uv - (off2 / resolution)) * 0.09447039785044732;
+	  color += texture2D(image, uv + (off3 / resolution)) * 0.010381362401148057;
+	  color += texture2D(image, uv - (off3 / resolution)) * 0.010381362401148057;
+	  return color;
+	}
 
-void main()
-{
-	#if EXPENSIVE
-	float compareDepth = abs(texture2D(depthSampler, vUV).r);
-	float texelsize = 1.0 / outSize;
-	float result = 0.0;
-	float weightSum = 0.0;
+	vec4 blur13Bilateral(sampler2D image, vec2 uv, float resolution, vec2 direction) {
+	  vec4 color = vec4(0.0);
+	  vec2 off1 = vec2(1.411764705882353) * direction;
+	  vec2 off2 = vec2(3.2941176470588234) * direction;
+	  vec2 off3 = vec2(5.176470588235294) * direction;
+
+	  float compareDepth = abs(texture2D(depthNormalSampler, uv).r);
+	  float sampleDepth;
+	  float weight;
+	  float weightSum = 30.0;
+
+	  color += texture2D(image, uv) * 30.0;
+
+	  sampleDepth = abs(texture2D(depthNormalSampler, uv + (off1 / resolution)).r);
+	  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
+	  weightSum +=  weight;
+	  color += texture2D(image, uv + (off1 / resolution)) * weight;
+
+	  sampleDepth = abs(texture2D(depthNormalSampler, uv - (off1 / resolution)).r);
+	  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
+	  weightSum +=  weight;
+	  color += texture2D(image, uv - (off1 / resolution)) * weight;
+
+	  sampleDepth = abs(texture2D(depthNormalSampler, uv + (off2 / resolution)).r);
+	  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
+	  weightSum += weight;
+	  color += texture2D(image, uv + (off2 / resolution)) * weight;
+
+	  sampleDepth = abs(texture2D(depthNormalSampler, uv - (off2 / resolution)).r);
+	  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
+	  weightSum += weight;
+	  color += texture2D(image, uv - (off2 / resolution)) * weight;
+
+	  sampleDepth = abs(texture2D(depthNormalSampler, uv + (off3 / resolution)).r);
+	  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
+	  weightSum += weight;
+	  color += texture2D(image, uv + (off3 / resolution)) * weight;
+
+	  sampleDepth = abs(texture2D(depthNormalSampler, uv - (off3 / resolution)).r);
+	  weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30.0);
+	  weightSum += weight;
+	  color += texture2D(image, uv - (off3 / resolution)) * weight;
+
+	  return color / weightSum;
+	}
 
-	for (int i = 0; i < SAMPLES; ++i)
+	void main()
 	{
+		#if EXPENSIVE
+		float compareDepth = abs(texture2D(depthNormalSampler, vUV).r);
+		float texelsize = 1.0 / outSize;
+		float result = 0.0;
+		float weightSum = 0.0;
+
+		for (int i = 0; i < SAMPLES; ++i)
+		{
+			#ifdef BILATERAL_BLUR_H
+			vec2 direction = vec2(1.0, 0.0);
+			vec2 sampleOffset = vec2(texelsize * samplerOffsets[i], 0.0);
+			#else
+			vec2 direction = vec2(0.0, 1.0);
+			vec2 sampleOffset = vec2(0.0, texelsize * samplerOffsets[i]);
+			#endif
+			vec2 samplePos = vUV + sampleOffset;
+
+			float sampleDepth = abs(texture2D(depthNormalSampler, samplePos).r);
+			float weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30000.0);
+
+			result += texture2D(textureSampler, samplePos).r * weight;
+			weightSum += weight;
+		}
+
+		result /= weightSum;
+		gl_FragColor.rgb = vec3(result);
+		gl_FragColor.a = 1.0;
+		#else
+		vec4 color;
 		#ifdef BILATERAL_BLUR_H
 		vec2 direction = vec2(1.0, 0.0);
-		vec2 sampleOffset = vec2(texelsize * samplerOffsets[i], 0.0);
+		color = blur13Bilateral(textureSampler, vUV, outSize, direction);
 		#else
 		vec2 direction = vec2(0.0, 1.0);
-		vec2 sampleOffset = vec2(0.0, texelsize * samplerOffsets[i]);
+		color = blur13Bilateral(textureSampler, vUV, outSize, direction);
 		#endif
-		vec2 samplePos = vUV + sampleOffset;
-
-		float sampleDepth = abs(texture2D(depthSampler, samplePos).r);
-		float weight = clamp(1.0 / ( 0.003 + abs(compareDepth - sampleDepth)), 0.0, 30000.0);
 
-		result += texture2D(textureSampler, samplePos).r * weight;
-		weightSum += weight;
+		gl_FragColor.rgb = vec3(color.r);
+		gl_FragColor.a = 1.0;
+		#endif
 	}
-
-	result /= weightSum;
-	gl_FragColor.rgb = vec3(result);
-	gl_FragColor.a = 1.0;
-	#else
-	vec4 color;
-	#ifdef BILATERAL_BLUR_H
-	vec2 direction = vec2(1.0, 0.0);
-	color = blur13Bilateral(textureSampler, vUV, outSize, direction);
-	#else
-	vec2 direction = vec2(0.0, 1.0);
-	color = blur13Bilateral(textureSampler, vUV, outSize, direction);
-	#endif
-
-	gl_FragColor.rgb = vec3(color.r);
-	gl_FragColor.a = 1.0;
-	#endif
-}
 #endif