
Merge pull request #3865 from TrevorDev/keepStateDefaultPipeline

Keep state default pipeline
David Catuhe 7 years ago
parent commit 8a118ba02e

+ 1 - 1
dist/preview release/what's new.md

@@ -14,7 +14,7 @@
 - Added [VideoDome](http://doc.babylonjs.com/how_to/360videodome) class to easily support 360 videos ([DavidHGillen](https://github.com/DavidHGillen))
 - Added [GlowLayer](https://doc.babylonjs.com/how_to/glow_layer) to easily support glow from emissive materials ([sebavan](https://github.com/sebavan))
 - New [AssetContainer](http://doc.babylonjs.com/how_to/how_to_use_assetcontainer) Class and loading methods ([trevordev](https://github.com/trevordev))
-- Added depth of field, MSAA, sharpening, grain and chromatic aberration effect to the default pipeline ([trevordev](https://github.com/trevordev))
+- Added depth of field, MSAA, sharpening, chromatic aberration and grain effect to the default pipeline ([trevordev](https://github.com/trevordev))
 
 ## Updates
 

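As a rough usage sketch of the changelog entry above (an existing scene and camera are assumed, as is an HDR pipeline so that imageProcessing is present), the new effects can be enabled like this:

    var pipeline = new BABYLON.DefaultRenderingPipeline("default", true, scene, [camera]);

    // Depth of field
    pipeline.depthOfFieldEnabled = true;
    pipeline.depthOfField.focusDistance = 2000; // scene units / 1000

    // Sharpening and chromatic aberration
    pipeline.sharpenEnabled = true;
    pipeline.chromaticAberrationEnabled = true;

    // Grain is exposed through the image processing post process (may be null on non-HDR pipelines)
    if (pipeline.imageProcessing) {
        pipeline.imageProcessing.grainEnabled = true;
    }

    // MSAA requires WebGL 2; a warning is logged otherwise
    pipeline.msaaEnabled = true;
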
+ 99 - 40
src/PostProcess/RenderPipeline/Pipelines/babylon.defaultRenderingPipeline.ts

@@ -5,7 +5,7 @@
      */
     export class DefaultRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
         private _scene: Scene;
-
+        private _originalCameras:Array<Camera> = [];
         /**
 		 * ID of the sharpen post process,
 		 */
@@ -52,6 +52,7 @@
 		 * Sharpen post process which will apply a sharpen convolution to enhance edges
 		 */
         public sharpen: SharpenPostProcess;
+        private _sharpenEffect: PostProcessRenderEffect;
         /**
 		 * First pass of bloom to capture the original image texture for later use.
 		 */
@@ -92,6 +93,7 @@
 		 * Chromatic aberration post process which will shift rgb colors in the image
 		 */
         public chromaticAberration: ChromaticAberrationPostProcess;
+        private _chromaticAberrationEffect: PostProcessRenderEffect;
 
         /**
          * Animations which can be used to tweak settings over a period of time
@@ -228,6 +230,20 @@
                 return;
             }
             this._depthOfFieldBlurLevel = value;
+            
+            // Recreate the depth of field effect and dispose the old one, as the blur level setting is not dynamic
+            var oldDof = this.depthOfField;
+
+            this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType);
+            this.depthOfField.focalLength = oldDof.focalLength;
+            this.depthOfField.focusDistance = oldDof.focusDistance;
+            this.depthOfField.fStop = oldDof.fStop;
+            this.depthOfField.lensSize = oldDof.lensSize;
+            
+            for (var i = 0; i < this._cameras.length; i++) {
+                oldDof.disposeEffects(this._cameras[i]);
+            }
+
             this._buildPipeline();
         }
 
@@ -310,6 +326,7 @@
         constructor(name: string, hdr: boolean, scene: Scene, cameras?: Camera[], automaticBuild = true) {
             super(scene.getEngine(), name);
             this._cameras = cameras ||  [];
+            this._originalCameras = this._cameras.slice();
 
             this._buildAllowed = automaticBuild;
 
@@ -333,6 +350,15 @@
             // Attach
             scene.postProcessRenderPipelineManager.addPipeline(this);
 
+            var engine = this._scene.getEngine();
+            this.sharpen = new SharpenPostProcess("sharpen", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
+            this._sharpenEffect = new PostProcessRenderEffect(engine, this.SharpenPostProcessId, () => { return this.sharpen; }, true);
+
+            this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType);
+
+            this.chromaticAberration = new ChromaticAberrationPostProcess("ChromaticAberration", engine.getRenderWidth(), engine.getRenderHeight(), 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
+            this._chromaticAberrationEffect = new PostProcessRenderEffect(engine, this.ChromaticAberrationPostProcessId, () => { return this.chromaticAberration; }, true);
+            
             this._buildPipeline();
         }
 
@@ -346,6 +372,30 @@
             this._buildAllowed = previousState;
         }
 
+        private _prevPostProcess:Nullable<PostProcess> = null;
+        private _prevPrevPostProcess:Nullable<PostProcess> = null;
+
+        private _setAutoClearAndTextureSharing(postProcess:PostProcess, skipTextureSharing = false){
+            if(this._prevPostProcess && this._prevPostProcess.autoClear){
+                postProcess.autoClear = false;
+            }else{
+                postProcess.autoClear = true;
+            }
+
+            if(!skipTextureSharing){
+                if(this._prevPrevPostProcess){
+                    postProcess.shareOutputWith(this._prevPrevPostProcess);
+                }else{
+                    postProcess.useOwnOutput();
+                }
+
+                if(this._prevPostProcess){
+                    this._prevPrevPostProcess = this._prevPostProcess;
+                }
+                this._prevPostProcess = postProcess;
+            }
+        }
+
         private _buildPipeline() {
             if (!this._buildAllowed) {
                 return;
@@ -354,23 +404,37 @@
             var engine = this._scene.getEngine();
 
             this._disposePostProcesses();
+            if (this._cameras !== null) {
+                this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
+                // Restore the original cameras so the pipeline can be reattached
+                this._cameras = this._originalCameras.slice();
+            }
             this._reset();
+            this._prevPostProcess = null;
+            this._prevPrevPostProcess = null;
+
+            if (this.fxaaEnabled) {
+                this.fxaa = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
+                this.addEffect(new PostProcessRenderEffect(engine, this.FxaaPostProcessId, () => { return this.fxaa; }, true));
+                this._setAutoClearAndTextureSharing(this.fxaa);
+                
+            }
 
             if (this.sharpenEnabled) {
-                this.sharpen = new SharpenPostProcess("sharpen", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
-                this.addEffect(new PostProcessRenderEffect(engine, this.SharpenPostProcessId, () => { return this.sharpen; }, true));
+                this.addEffect(this._sharpenEffect);
+                this._setAutoClearAndTextureSharing(this.sharpen);
             }
 
-            if(this.depthOfFieldEnabled){
-                // Enable and get current depth map
+            if (this.depthOfFieldEnabled) {
                 var depthTexture = this._scene.enableDepthRenderer(this._cameras[0]).getDepthMap();
-
-                this.depthOfField = new DepthOfFieldEffect(this._scene, depthTexture, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType);
+                this.depthOfField.depthTexture = depthTexture;
                 this.addEffect(this.depthOfField);
+                this._setAutoClearAndTextureSharing(this.depthOfField._depthOfFieldMerge);
             }
 
             if (this.bloomEnabled) {
                 this.pass = new PassPostProcess("sceneRenderTarget", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
+                this._setAutoClearAndTextureSharing(this.pass, true);
                 this.addEffect(new PostProcessRenderEffect(engine, this.PassPostProcessId, () => { return this.pass; }, true));
 
                 if (!this._hdr) { // Need to enhance highlights if not using float rendering
@@ -420,18 +484,14 @@
                 }
             }
 
-            if (this.fxaaEnabled) {
-                this.fxaa = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
-                this.addEffect(new PostProcessRenderEffect(engine, this.FxaaPostProcessId, () => { return this.fxaa; }, true));
-
-                this.fxaa.autoClear = !this.bloomEnabled && (!this._hdr || !this.imageProcessing);
-            } else if (this._hdr && this.imageProcessing) {
+            if (this._hdr && this.imageProcessing) {
                 this.finalMerge = this.imageProcessing;
             }
             else {
                 this.finalMerge = new PassPostProcess("finalMerge", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
                 this.addEffect(new PostProcessRenderEffect(engine, this.FinalMergePostProcessId, () => { return this.finalMerge; }, true));
-
+                this._setAutoClearAndTextureSharing(this.finalMerge, true);
+                
                 this.finalMerge.autoClear = !this.bloomEnabled && (!this._hdr || !this.imageProcessing);
             }
 
@@ -441,30 +501,23 @@
                     if (this.imageProcessing) {
                         this.imageProcessing.shareOutputWith(this.pass);
                         this.imageProcessing.autoClear = false;
-                    } else if (this.fxaa) {
-                        this.fxaa.shareOutputWith(this.pass);
                     } else {
                         this.finalMerge.shareOutputWith(this.pass);
                     }
                 } else {
-                    if (this.fxaa) {
-                        this.fxaa.shareOutputWith(this.pass);
-                    } else {
-                        this.finalMerge.shareOutputWith(this.pass);
-                    }
+                    this.finalMerge.shareOutputWith(this.pass);
                 }
             }
 
             if (this.chromaticAberrationEnabled) {
-                this.chromaticAberration = new ChromaticAberrationPostProcess("ChromaticAberration", engine.getRenderWidth(), engine.getRenderHeight(), 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
-                this.addEffect(new PostProcessRenderEffect(engine, this.ChromaticAberrationPostProcessId, () => { return this.chromaticAberration; }, true));
+                this.addEffect(this._chromaticAberrationEffect);
+                this._setAutoClearAndTextureSharing(this.chromaticAberration);
             }
 
-
             if (this._cameras !== null) {
                 this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
             }
-            
+
             if(this.msaaEnabled){
                 if(!this._enableMSAAOnFirstPostProcess()){
                     BABYLON.Tools.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
@@ -472,14 +525,10 @@
             }
         }
 
-        private _disposePostProcesses(): void {
+        private _disposePostProcesses(disposeNonRecreated = false): void {
             for (var i = 0; i < this._cameras.length; i++) {
                 var camera = this._cameras[i];
 
-                if (this.sharpen) {
-                    this.sharpen.dispose(camera);
-                }
-
                 if (this.pass) {
                     this.pass.dispose(camera);
                 }
@@ -512,16 +561,22 @@
                     this.finalMerge.dispose(camera);
                 }
 
-                if(this.depthOfField){
-                    this.depthOfField.disposeEffects(camera);
-                }
-
-                if(this.chromaticAberration){
-                    this.chromaticAberration.dispose(camera);
+                // These are created in the constructor and should not be disposed on every pipeline change
+                if(disposeNonRecreated){
+                    if (this.sharpen) {
+                        this.sharpen.dispose(camera);
+                    }
+    
+                    if(this.depthOfField){
+                        this.depthOfField.disposeEffects(camera);
+                    }
+    
+                    if(this.chromaticAberration){
+                        this.chromaticAberration.dispose(camera);
+                    }
                 }
             }
 
-            (<any>this.sharpen) = null;
             (<any>this.pass) = null;
             (<any>this.highlights) = null;
             (<any>this.blurX) = null;
@@ -530,15 +585,19 @@
             (<any>this.imageProcessing) = null;
             (<any>this.fxaa) = null;
             (<any>this.finalMerge) = null;
-            (<any>this.depthOfField) = null;
-            (<any>this.chromaticAberration) = null;
+
+            if(disposeNonRecreated){
+                (<any>this.sharpen) = null;
+                (<any>this.depthOfField) = null;
+                (<any>this.chromaticAberration) = null;
+            } 
         }
 
         /**
          * Dispose of the pipeline and stop all post processes
          */
         public dispose(): void {
-            this._disposePostProcesses();
+            this._disposePostProcesses(true);
 
             this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
 

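Because sharpen, depthOfField and chromaticAberration are now created once in the constructor and re-used by _buildPipeline, their settings survive toggling the corresponding *Enabled flags or changing depthOfFieldBlurLevel. A minimal sketch of that behaviour (scene and camera assumed; edgeAmount is assumed to be the SharpenPostProcess tuning property):

    var pipeline = new BABYLON.DefaultRenderingPipeline("default", true, scene, [camera]);

    pipeline.sharpenEnabled = true;
    pipeline.sharpen.edgeAmount = 0.6;          // tweak the kept SharpenPostProcess instance

    pipeline.depthOfFieldEnabled = true;
    pipeline.depthOfField.focusDistance = 3000;
    pipeline.depthOfField.fStop = 1.2;

    // Toggling an effect rebuilds the pipeline but re-uses the same post process
    // instances, so the values set above are preserved.
    pipeline.sharpenEnabled = false;
    pipeline.sharpenEnabled = true;

    // Changing the blur level recreates the DepthOfFieldEffect internally, and the
    // setter copies focalLength/focusDistance/fStop/lensSize over to the new instance.
    pipeline.depthOfFieldBlurLevel = BABYLON.DepthOfFieldEffectBlurLevel.Medium;
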
+ 21 - 8
src/PostProcess/babylon.circleOfConfusionPostProcess.ts

@@ -6,24 +6,25 @@ module BABYLON {
         /**
          * Max lens size in scene units/1000 (eg. millimeter). Standard cameras are 50mm. (default: 50) The diameter of the resulting aperture can be computed by lensSize/fStop.
          */
-        lensSize = 50
+        public lensSize = 50
         /**
          * F-Stop of the effect's camera. The diameter of the resulting aperture can be computed by lensSize/fStop. (default: 1.4)
          */
-        fStop = 1.4;
+        public fStop = 1.4;
         /**
          * Distance away from the camera to focus on in scene units/1000 (eg. millimeter). (default: 2000)
          */
-        focusDistance = 2000;
+        public focusDistance = 2000;
         /**
          * Focal length of the effect's camera in scene units/1000 (eg. millimeter). (default: 50)
          */
-        focalLength = 50;
+        public focalLength = 50;
         
+        private _depthTexture:Nullable<RenderTargetTexture> = null;
         /**
          * Creates a new instance of @see CircleOfConfusionPostProcess
          * @param name The name of the effect.
-         * @param depthTexture The depth texture of the scene to compute the circle of confusion.
+         * @param depthTexture The depth texture of the scene to compute the circle of confusion. This must be set for the post process to function, but it may be assigned after construction if needed.
          * @param options The required width/height ratio to downsize to before computing the render pass.
          * @param camera The camera to apply the render pass to.
          * @param samplingMode The sampling mode to be used when computing the pass. (default: 0)
@@ -31,10 +32,15 @@ module BABYLON {
          * @param reusable If the post process can be reused on the same frame. (default: false)
          * @param textureType Type of textures used when performing the post process. (default: 0)
          */
-        constructor(name: string, depthTexture: RenderTargetTexture, options: number | PostProcessOptions, camera: Nullable<Camera>, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Engine.TEXTURETYPE_UNSIGNED_INT) {
+        constructor(name: string, depthTexture: Nullable<RenderTargetTexture>, options: number | PostProcessOptions, camera: Nullable<Camera>, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Engine.TEXTURETYPE_UNSIGNED_INT) {
             super(name, "circleOfConfusion", ["cameraMinMaxZ", "focusDistance", "cocPrecalculation"], ["depthSampler"], options, camera, samplingMode, engine, reusable, null, textureType);
+            this._depthTexture = depthTexture;
             this.onApplyObservable.add((effect: Effect) => {
-                effect.setTexture("depthSampler", depthTexture);
+                if(!this._depthTexture){
+                    BABYLON.Tools.Warn("No depth texture set on CircleOfConfusionPostProcess")
+                    return;
+                }
+                effect.setTexture("depthSampler", this._depthTexture);
                 
                 // Circle of confusion calculation, See https://developer.nvidia.com/gpugems/GPUGems/gpugems_ch23.html
                 var aperture = this.lensSize/this.fStop;
@@ -42,8 +48,15 @@ module BABYLON {
                 
                 effect.setFloat('focusDistance', this.focusDistance);
                 effect.setFloat('cocPrecalculation', cocPrecalculation);
-                effect.setFloat2('cameraMinMaxZ', depthTexture.activeCamera!.minZ, depthTexture.activeCamera!.maxZ);
+                effect.setFloat2('cameraMinMaxZ', this._depthTexture.activeCamera!.minZ, this._depthTexture.activeCamera!.maxZ);
             })
         }
+
+        /**
+         * Depth texture to be used to compute the circle of confusion. This must be set here or in the constructor in order for the post process to function.
+         */
+        public set depthTexture(value: RenderTargetTexture){
+            this._depthTexture = value;
+        }
     }
 }

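A short sketch of the nullable depth texture introduced above (scene and camera assumed to exist):

    // The depth texture may now be omitted at construction time...
    var coc = new BABYLON.CircleOfConfusionPostProcess("coc", null, 1.0, camera,
        BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false);
    coc.fStop = 1.4;
    coc.focusDistance = 2000;

    // ...and assigned later through the new setter; until it is set, the post
    // process only logs a warning when applied.
    coc.depthTexture = scene.enableDepthRenderer(camera).getDepthMap();
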
+ 64 - 52
src/PostProcess/babylon.depthOfFieldEffect.ts

@@ -24,7 +24,12 @@ module BABYLON {
         private _circleOfConfusion: CircleOfConfusionPostProcess;
         private _depthOfFieldBlurX: Array<DepthOfFieldBlurPostProcess>;
         private _depthOfFieldBlurY: Array<DepthOfFieldBlurPostProcess>;
-        private _depthOfFieldMerge: DepthOfFieldMergePostProcess;
+        /**
+         * Internal use only: the last post process of the depth of field effect
+         */
+        public _depthOfFieldMerge: DepthOfFieldMergePostProcess;
+
+        private _effects: Array<PostProcess> = [];
 
         /**
          * The focal the length of the camera used in the effect
@@ -66,64 +71,71 @@ module BABYLON {
         /**
          * Creates a new instance of @see DepthOfFieldEffect
          * @param scene The scene the effect belongs to.
-         * @param depthTexture The depth texture of the scene to compute the circle of confusion.
+         * @param depthTexture The depth texture of the scene to compute the circle of confusion. This must be set for the post process to function, but it may be assigned after construction if needed.
          * @param pipelineTextureType The type of texture to be used when performing the post processing.
          */
-        constructor(scene: Scene, depthTexture: RenderTargetTexture, blurLevel: DepthOfFieldEffectBlurLevel = DepthOfFieldEffectBlurLevel.Low, pipelineTextureType = 0) {
+        constructor(scene: Scene, depthTexture: Nullable<RenderTargetTexture>, blurLevel: DepthOfFieldEffectBlurLevel = DepthOfFieldEffectBlurLevel.Low, pipelineTextureType = 0) {
             super(scene.getEngine(), "depth of field", ()=>{
-                // Circle of confusion value for each pixel is used to determine how much to blur that pixel
-                this._circleOfConfusion = new BABYLON.CircleOfConfusionPostProcess("circleOfConfusion", depthTexture, 1, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
-                // Capture circle of confusion texture
-                this._depthOfFieldPass = new PassPostProcess("depthOfFieldPass", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
-                this._depthOfFieldPass.autoClear = false;
+                return this._effects;
+            }, true);
+            // Circle of confusion value for each pixel is used to determine how much to blur that pixel
+            this._circleOfConfusion = new BABYLON.CircleOfConfusionPostProcess("circleOfConfusion", depthTexture, 1, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
+            // Capture circle of confusion texture
+            this._depthOfFieldPass = new PassPostProcess("depthOfFieldPass", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
+            this._depthOfFieldPass.autoClear = false;
 
-                // Create a pyramid of blurred images (eg. fullSize 1/4 blur, half size 1/2 blur, quarter size 3/4 blur, eith size 4/4 blur)
-                // Blur the image but do not blur on sharp far to near distance changes to avoid bleeding artifacts 
-                // See section 2.6.2 http://fileadmin.cs.lth.se/cs/education/edan35/lectures/12dof.pdf
-                this._depthOfFieldBlurY = []
-                this._depthOfFieldBlurX = []
-                var blurCount = 1;
-                var kernelSize = 15;
-                switch(blurLevel){
-                    case DepthOfFieldEffectBlurLevel.High: {
-                        blurCount = 3;
-                        kernelSize = 51;
-                        break;
-                    }
-                    case DepthOfFieldEffectBlurLevel.Medium: {
-                        blurCount = 2;
-                        kernelSize = 31;
-                        break;
-                    }
-                    default: {
-                        kernelSize = 15;
-                        blurCount = 1;
-                        break;
-                    }
+            // Create a pyramid of blurred images (eg. fullSize 1/4 blur, half size 1/2 blur, quarter size 3/4 blur, eighth size 4/4 blur)
+            // Blur the image but do not blur on sharp far to near distance changes to avoid bleeding artifacts 
+            // See section 2.6.2 http://fileadmin.cs.lth.se/cs/education/edan35/lectures/12dof.pdf
+            this._depthOfFieldBlurY = []
+            this._depthOfFieldBlurX = []
+            var blurCount = 1;
+            var kernelSize = 15;
+            switch(blurLevel){
+                case DepthOfFieldEffectBlurLevel.High: {
+                    blurCount = 3;
+                    kernelSize = 51;
+                    break;
                 }
-                var adjustedKernelSize = kernelSize/Math.pow(2, blurCount-1);
-                for(var i = 0;i<blurCount;i++){
-                    var blurY = new DepthOfFieldBlurPostProcess("verticle blur", scene, new Vector2(0, 1.0), adjustedKernelSize, 1.0/Math.pow(2, i), null, this._depthOfFieldPass, i == 0 ? this._circleOfConfusion : null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
-                    blurY.autoClear = false;
-                    var blurX = new DepthOfFieldBlurPostProcess("horizontal blur", scene, new Vector2(1.0, 0), adjustedKernelSize, 1.0/Math.pow(2, i), null,  this._depthOfFieldPass, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
-                    blurX.autoClear = false;
-                    this._depthOfFieldBlurY.push(blurY);
-                    this._depthOfFieldBlurX.push(blurX);
+                case DepthOfFieldEffectBlurLevel.Medium: {
+                    blurCount = 2;
+                    kernelSize = 31;
+                    break;
                 }
-
-                // Merge blurred images with original image based on circleOfConfusion
-                this._depthOfFieldMerge = new DepthOfFieldMergePostProcess("depthOfFieldMerge", this._circleOfConfusion, this._depthOfFieldPass, this._depthOfFieldBlurY.slice(1), 1, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
-                this._depthOfFieldMerge.autoClear = false;
-                
-                // Set all post processes on the effect.
-                var effects= [this._circleOfConfusion, this._depthOfFieldPass];
-                for(var i=0;i<this._depthOfFieldBlurX.length;i++){
-                    effects.push(this._depthOfFieldBlurY[i]);
-                    effects.push(this._depthOfFieldBlurX[i]);
+                default: {
+                    kernelSize = 15;
+                    blurCount = 1;
+                    break;
                 }
-                effects.push(this._depthOfFieldMerge);
-                return effects;
-            }, true);
+            }
+            var adjustedKernelSize = kernelSize/Math.pow(2, blurCount-1);
+            for(var i = 0;i<blurCount;i++){
+                var blurY = new DepthOfFieldBlurPostProcess("vertical blur", scene, new Vector2(0, 1.0), adjustedKernelSize, 1.0/Math.pow(2, i), null, this._depthOfFieldPass, i == 0 ? this._circleOfConfusion : null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
+                blurY.autoClear = false;
+                var blurX = new DepthOfFieldBlurPostProcess("horizontal blur", scene, new Vector2(1.0, 0), adjustedKernelSize, 1.0/Math.pow(2, i), null,  this._depthOfFieldPass, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
+                blurX.autoClear = false;
+                this._depthOfFieldBlurY.push(blurY);
+                this._depthOfFieldBlurX.push(blurX);
+            }
+
+            // Merge blurred images with original image based on circleOfConfusion
+            this._depthOfFieldMerge = new DepthOfFieldMergePostProcess("depthOfFieldMerge", this._circleOfConfusion, this._depthOfFieldPass, this._depthOfFieldBlurY.slice(1), 1, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, pipelineTextureType);
+            this._depthOfFieldMerge.autoClear = false;
+            
+            // Set all post processes on the effect.
+            this._effects= [this._circleOfConfusion, this._depthOfFieldPass];
+            for(var i=0;i<this._depthOfFieldBlurX.length;i++){
+                this._effects.push(this._depthOfFieldBlurY[i]);
+                this._effects.push(this._depthOfFieldBlurX[i]);
+            }
+            this._effects.push(this._depthOfFieldMerge);
+        }
+
+        /**
+         * Depth texture to be used to compute the circle of confusion. This must be set here or in the constructor in order for the post process to function.
+         */
+        public set depthTexture(value: RenderTargetTexture){
+            this._circleOfConfusion.depthTexture = value;
         }
 
         /**

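The same deferred depth texture pattern applies to DepthOfFieldEffect; this is roughly how the default pipeline above uses it (scene and camera assumed):

    // Created once, without a depth texture
    var dof = new BABYLON.DepthOfFieldEffect(scene, null, BABYLON.DepthOfFieldEffectBlurLevel.Medium);
    dof.focalLength = 50;
    dof.focusDistance = 2000;

    // Only when depth of field is actually enabled does the pipeline request the
    // depth renderer and hand its depth map to the effect.
    dof.depthTexture = scene.enableDepthRenderer(camera).getDepthMap();
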
+ 39 - 0
src/PostProcess/babylon.imageProcessingPostProcess.ts

@@ -272,6 +272,45 @@
             this.imageProcessingConfiguration.vignetteEnabled = value;
         }
 
+        /**
+         * Gets whether the grain effect is enabled.
+         */
+        public get grainEnabled(): boolean {
+            return this.imageProcessingConfiguration.grainEnabled;
+        }
+        /**
+         * Sets whether the grain effect is enabled.
+         */
+        public set grainEnabled(value: boolean) {
+            this.imageProcessingConfiguration.grainEnabled = value;
+        }
+
+        /**
+         * Gets the grain effect's intensity.
+         */
+        public get grainIntensity(): number {
+            return this.imageProcessingConfiguration.grainIntensity;
+        }
+        /**
+         * Sets the grain effect's intensity.
+         */
+        public set grainIntensity(value: number) {
+            this.imageProcessingConfiguration.grainIntensity = value;
+        }
+
+        /**
+         * Gets whether the grain effect is animated.
+         */
+        public get grainAnimated(): boolean {
+            return this.imageProcessingConfiguration.grainAnimated;
+        }
+        /**
+         * Sets whether the grain effect is animated.
+         */
+        public set grainAnimated(value: boolean) {
+            this.imageProcessingConfiguration.grainAnimated = value;
+        }
+
         @serialize()
         private _fromLinearSpace = true;
         /**

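A small usage sketch for the new grain accessors (camera assumed; the accessors simply forward to imageProcessingConfiguration, so the matching grain support on ImageProcessingConfiguration from the same release is assumed, and the values are illustrative):

    var imageProcessing = new BABYLON.ImageProcessingPostProcess("imageProcessing", 1.0, camera);
    imageProcessing.grainEnabled = true;
    imageProcessing.grainIntensity = 10;
    imageProcessing.grainAnimated = true; // vary the grain pattern over time
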
+ 13 - 1
src/PostProcess/babylon.postProcess.ts

@@ -85,7 +85,7 @@
         private _parameters: string[];
         private _scaleRatio = new Vector2(1, 1);
         protected _indexParameters: any;
-        private _shareOutputWithPostProcess: PostProcess;
+        private _shareOutputWithPostProcess: Nullable<PostProcess>;
         private _texelSize = Vector2.Zero();
         private _forcedOutputTexture: InternalTexture;
 
@@ -297,6 +297,18 @@
         }
 
         /**
+         * Reverses the effect of calling shareOutputWith and restores the post process to rendering into its own output texture. 
+         * This should be called if the post process that shares output with this post process is disabled/disposed.
+         */
+        public useOwnOutput() {
+            if(this._textures.length == 0){
+                this._textures = new SmartArray<InternalTexture>(2);
+            }
+
+            this._shareOutputWithPostProcess = null;
+        }
+
+        /**
          * Updates the effect with the current post process compile time values and recompiles the shader.
          * @param defines Define statements that should be added at the beginning of the shader. (default: null)
          * @param uniforms Set of uniform variables that will be passed to the shader. (default: null)

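A minimal sketch of when useOwnOutput is needed (post process names are arbitrary, camera assumed): after shareOutputWith, a post process renders into its partner's output texture, so it has to be told to allocate its own textures again once that partner goes away:

    var passA = new BABYLON.PassPostProcess("passA", 1.0, camera);
    var passB = new BABYLON.PassPostProcess("passB", 1.0, camera);

    // passB writes into passA's output instead of allocating its own textures
    passB.shareOutputWith(passA);

    // If passA is later disabled or disposed, give passB its own output back
    passA.dispose(camera);
    passB.useOwnOutput();
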
+ 1 - 1
src/Rendering/babylon.depthRenderer.ts

@@ -133,7 +133,7 @@
             var mesh = subMesh.getMesh();
 
             // Alpha test
-            if (material && material.needAlphaTesting()) {
+            if (material && material.needAlphaTesting() && material.getAlphaTestTexture()) {
                 defines.push("#define ALPHATEST");
                 if (mesh.isVerticesDataPresent(VertexBuffer.UVKind)) {
                     attribs.push(VertexBuffer.UVKind);