
image processing to material integration

Sebastien Vandenberghe · 8 years ago
parent
commit
a8aa8d6269
26 changed files with 1517 additions and 930 deletions
  1. Tools/Gulp/config.json (+7 -2)
  2. materialsLibrary/src/legacyPBR/legacyColorCurves.fx (+0 -0)
  3. materialsLibrary/src/legacyPBR/legacyColorCurvesDefinition.fx (+0 -0)
  4. materialsLibrary/src/legacyPBR/legacyColorGrading.fx (+0 -0)
  5. materialsLibrary/src/legacyPBR/legacyColorGradingDefinition.fx (+0 -0)
  6. materialsLibrary/src/legacyPBR/legacyPbr.fragment.fx (+18 -5)
  7. src/Materials/PBR/babylon.pbrBaseMaterial.ts (+116 -245)
  8. src/Materials/PBR/babylon.pbrBaseSimpleMaterial.ts (+0 -2)
  9. src/Materials/PBR/babylon.pbrMaterial.ts (+157 -59)
  10. src/Materials/Textures/babylon.colorGradingTexture.ts (+0 -42)
  11. src/Materials/babylon.effect.ts (+4 -0)
  12. src/Materials/babylon.imageProcessing.ts (+357 -0)
  13. src/Materials/babylon.materialHelper.ts (+5 -4)
  14. src/Materials/babylon.standardMaterial.ts (+192 -50)
  15. src/PostProcess/babylon.imageProcessingPostProcess.ts (+257 -148)
  16. src/Shaders/ShadersInclude/helperFunctions.fx (+31 -1)
  17. src/Shaders/ShadersInclude/imageProcessingDeclaration.fx (+24 -0)
  18. src/Shaders/ShadersInclude/imageProcessingFunctions.fx (+103 -0)
  19. src/Shaders/ShadersInclude/pbrFragmentDeclaration.fx (+0 -9)
  20. src/Shaders/ShadersInclude/pbrFunctions.fx (+11 -64)
  21. src/Shaders/ShadersInclude/pbrUboDeclaration.fx (+0 -3)
  22. src/Shaders/default.fragment.fx (+9 -13)
  23. src/Shaders/imageProcessing.fragment.fx (+14 -115)
  24. src/Shaders/pbr.fragment.fx (+175 -168)
  25. src/Tools/babylon.decorators.ts (+21 -0)
  26. src/babylon.scene.ts (+16 -0)

+ 7 - 2
Tools/Gulp/config.json

@@ -31,7 +31,8 @@
                 "multiMaterial", "pbrMaterial",
                 "meshBuilder", "layer",
                 "additionalPostProcess_blur", "additionalPostProcess_fxaa", "additionalPostProcess_highlights", "additionalPostProcess_imageProcessing", "colorCurves", "defaultRenderingPipeline",
-                "debug", "textureTools"
+                "debug", "textureTools", "hdr",
+                "loader"
         ],
         "distributed": ["minimalViewer"]
     },
@@ -1166,7 +1167,11 @@
                     "../../materialsLibrary/src/legacyPBR/legacyPbrLightFunctions.fx",
                     "../../materialsLibrary/src/legacyPBR/legacyPbrLightFunctionsCall.fx",
                     "../../materialsLibrary/src/legacyPBR/legacyPbrUboDeclaration.fx",
-                    "../../materialsLibrary/src/legacyPBR/legacyPbrVertexDeclaration.fx"
+                    "../../materialsLibrary/src/legacyPBR/legacyPbrVertexDeclaration.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorCurves.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorCurvesDefinition.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorGrading.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorGradingDefinition.fx",
                 ],
                 "output": "babylon.legacyPbrMaterial.js"
             }

src/Shaders/ShadersInclude/colorCurves.fx → materialsLibrary/src/legacyPBR/legacyColorCurves.fx


src/Shaders/ShadersInclude/colorCurvesDefinition.fx → materialsLibrary/src/legacyPBR/legacyColorCurvesDefinition.fx


src/Shaders/ShadersInclude/colorGrading.fx → materialsLibrary/src/legacyPBR/legacyColorGrading.fx


src/Shaders/ShadersInclude/colorGradingDefinition.fx → materialsLibrary/src/legacyPBR/legacyColorGradingDefinition.fx


+ 18 - 5
materialsLibrary/src/legacyPBR/legacyPbr.fragment.fx

@@ -105,11 +105,11 @@ varying vec3 vDirectionW;
 #endif
 
 #ifdef CAMERACOLORGRADING
-	#include<colorGradingDefinition>
+	#include<legacyColorGradingDefinition>
 #endif
 
 #ifdef CAMERACOLORCURVES
-	#include<colorCurvesDefinition>
+	#include<legacyColorCurvesDefinition>
 #endif
 
 // PBR
@@ -117,17 +117,30 @@ varying vec3 vDirectionW;
 #include<legacyPbrFunctions>
 
 #ifdef CAMERACOLORGRADING
-	#include<colorGrading>
+	#include<legacyColorGrading>
 #endif
 
 #ifdef CAMERACOLORCURVES
-	#include<colorCurves>
+	#include<legacyColorCurves>
 #endif
 
 #include<harmonicsFunctions>
 #include<legacyPbrLightFunctions>
 
-#include<helperFunctions>
+mat3 transposeMat3(mat3 inMatrix) {
+	vec3 i0 = inMatrix[0];
+	vec3 i1 = inMatrix[1];
+	vec3 i2 = inMatrix[2];
+
+	mat3 outMatrix = mat3(
+		vec3(i0.x, i1.x, i2.x),
+		vec3(i0.y, i1.y, i2.y),
+		vec3(i0.z, i1.z, i2.z)
+		);
+
+	return outMatrix;
+}
+
 #include<bumpFragmentFunctions>
 #include<clipPlaneFragmentDeclaration>
 #include<logDepthDeclaration>

+ 116 - 245
src/Materials/PBR/babylon.pbrBaseMaterial.ts

@@ -1,5 +1,5 @@
 module BABYLON {
-    class PBRMaterialDefines extends MaterialDefines {
+    class PBRMaterialDefines extends MaterialDefines implements IImageProcessingDefines {
         public PBR = true;
         public ALBEDO = false;
         public AMBIENT = false;
@@ -20,9 +20,6 @@
         public POINTSIZE = false;
         public FOG = false;
         public SPECULARTERM = false;
-        public OPACITYFRESNEL = false;
-        public EMISSIVEFRESNEL = false;
-        public FRESNEL = false;
         public NORMAL = false;
         public TANGENT = false;
         public UV1 = false;
@@ -34,7 +31,6 @@
         public INSTANCES = false;
         public MICROSURFACEFROMREFLECTIVITYMAP = false;
         public MICROSURFACEAUTOMATIC = false;
-        public EMISSIVEASILLUMINATION = false;
         public LIGHTMAP = false;
         public USELIGHTMAPASSHADOWMAP = false;
         public REFLECTIONMAP_3D = false;
@@ -49,10 +45,6 @@
         public REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = false;
         public INVERTCUBICMAP = false;
         public LOGARITHMICDEPTH = false;
-        public CAMERATONEMAP = false;
-        public CAMERACONTRAST = false;
-        public CAMERACOLORGRADING = false;
-        public CAMERACOLORCURVES = false;
         public USESPHERICALFROMREFLECTIONMAP = false;
         public REFRACTION = false;
         public REFRACTIONMAP_3D = false;
@@ -84,7 +76,19 @@
         public NUM_MORPH_INFLUENCERS = 0;
         
         public ALPHATESTVALUE = 0.5;
-        public LDROUTPUT = true;
+        public PREMULTIPLYALPHA = false;
+        public ALPHAFRESNEL = false;
+
+        public IMAGEPROCESSING = false;
+        public VIGNETTE = false;
+        public VIGNETTEBLENDMODEMULTIPLY = false;
+        public VIGNETTEBLENDMODEOPAQUE = false;
+        public TONEMAPPING = false;
+        public CONTRAST = false;
+        public COLORCURVES = false;
+        public COLORGRADING = false;
+
+        public HDRLINEAROUTPUT = false;
 
         constructor() {
             super();
@@ -139,37 +143,6 @@
         protected _disableBumpMap: boolean = false;
 
         /**
-         * The camera exposure used on this material.
-         * This property is here and not in the camera to allow controlling exposure without full screen post process.
-         * This corresponds to a photographic exposure.
-         */
-        protected _cameraExposure: number = 1.0;
-        
-        /**
-         * The camera contrast used on this material.
-         * This property is here and not in the camera to allow controlling contrast without full screen post process.
-         */
-        protected _cameraContrast: number = 1.0;
-        
-        /**
-         * Color Grading 2D Lookup Texture.
-         * This allows special effects like sepia, black and white to sixties rendering style. 
-         */
-        protected _cameraColorGradingTexture: BaseTexture = null;
-        
-        /**
-         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
-         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
-         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
-         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
-         */
-        protected _cameraColorCurves: ColorCurves = null;
-         
-        private _cameraInfos: Vector4 = new Vector4(1.0, 1.0, 0.0, 0.0);
-
-        private _microsurfaceTextureLods: Vector2 = new Vector2(0.0, 0.0);
-
-        /**
          * AKA Diffuse Texture in standard nomenclature.
          */
         protected _albedoTexture: BaseTexture;
@@ -188,6 +161,8 @@
 
         protected _reflectionTexture: BaseTexture;
 
+        private _microsurfaceTextureLods: Vector2 = new Vector2(0.0, 0.0);
+
         protected _emissiveTexture: BaseTexture;
         
         /**
@@ -255,10 +230,6 @@
          */
         protected _invertRefractionY = false;
 
-        protected _opacityFresnelParameters: FresnelParameters;
-
-        protected _emissiveFresnelParameters: FresnelParameters;
-
         /**
          * This parameters will make the material used its opacity to control how much it is refracting aginst not.
          * Materials half opaque for instance using refraction could benefit from this control.
@@ -268,12 +239,6 @@
         protected _useLightmapAsShadowmap = false;
         
         /**
-         * In this mode, the emissive informtaion will always be added to the lighting once.
-         * A light for instance can be thought as emissive.
-         */
-        protected _useEmissiveAsIllumination = false;
-        
-        /**
          * Secifies that the alpha is coming form the albedo channel alpha channel.
          */
         protected _useAlphaFromAlbedoTexture = false;
@@ -321,12 +286,6 @@
         protected _useAutoMicroSurfaceFromReflectivityMap = false;
         
         /**
-         * Allows to work with scalar in linear mode. This is definitely a matter of preferences and tools used during
-         * the creation of the material.
-         */
-        protected _useScalarInLinearSpace = false;
-        
-        /**
          * BJS is using an harcoded light falloff based on a manually sets up range.
          * In PBR, one way to represents the fallof is to use the inverse squared root algorythm.
          * This parameter can help you switch back to the BJS mode in order to create scenes using both materials.
@@ -390,10 +349,61 @@
         protected _forceAlphaTest = false;
 
         /**
-         * If false, it allows the output of the shader to be in hdr space (e.g. more than one) which is useful
-         * in combination of post process in float or half float mode.
+         * Secifies that the alpha is premultiplied before output (this enables alpha premultiplied blending).
+         * in your scene composition.
+         */
+        protected _premultiplyAlpha = false;
+
+        /**
+         * A fresnel is applied to the alpha of the model to ensure grazing angles edges are not alpha tested.
+         * And/Or ocllude the blended part.
          */
-        protected _ldrOutput = true;
+        protected _useAlphaFresnel = false;
+
+        @serialize()
+        protected _hdrLinearOutput = false;
+
+        /**
+         * Default configuration related to image processing available in the PBR Material.
+         */
+        @serializeAsImageProcessing()
+        protected _imageProcessingConfiguration: ImageProcessing;
+
+        /**
+         * Keep track of the image processing observer to allow dispose and replace.
+         */
+        private _imageProcessingObserver: Observer<ImageProcessing>;
+
+        /**
+         * Attaches a new image processing configuration to the PBR Material.
+         * @param configuration 
+         */
+        protected _attachImageProcessingConfiguration(configuration: ImageProcessing): void {
+            if (configuration === this._imageProcessingConfiguration) {
+                return;
+            }
+
+            // Detaches observer.
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
+            // Pick the scene configuration if needed.
+            if (!configuration) {
+                this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration;
+            }
+            else {
+                this._imageProcessingConfiguration = configuration;
+            }
+
+            // Attaches observer.
+            this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(conf => {
+                this._markAllSubMeshesAsTexturesDirty();
+            });
+
+            // Ensure the effect will be rebuilt.
+            this._markAllSubMeshesAsTexturesDirty();
+        }
 
         private _renderTargets = new SmartArray<RenderTargetTexture>(16);
         private _worldViewProjectionMatrix = Matrix.Zero();
@@ -411,6 +421,9 @@
         constructor(name: string, scene: Scene) {
             super(name, scene);
 
+            // Setup the default processing configuration to the scene.
+            this._attachImageProcessingConfiguration(null);
+
             this.getRenderTargetTextures = (): SmartArray<RenderTargetTexture> => {
                 this._renderTargets.reset();
 
@@ -441,7 +454,7 @@
             if (this._linkRefractionWithTransparency) {
                 return false;
             }
-            return (this.alpha < 1.0) || (this._opacityTexture != null) || this._shouldUseAlphaFromAlbedoTexture() || this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled;
+            return (this.alpha < 1.0) || (this._opacityTexture != null) || this._shouldUseAlphaFromAlbedoTexture();
         }
 
         public needAlphaTesting(): boolean {
@@ -459,60 +472,7 @@
             return this._albedoTexture;
         }
 
-        private convertColorToLinearSpaceToRef(color: Color3, ref: Color3): void {
-            PBRMaterial.convertColorToLinearSpaceToRef(color, ref, this._useScalarInLinearSpace);
-        }
-
-        private static convertColorToLinearSpaceToRef(color: Color3, ref: Color3, useScalarInLinear: boolean): void {
-            if (!useScalarInLinear) {
-                color.toLinearSpaceToRef(ref);
-            } else {
-                ref.r = color.r;
-                ref.g = color.g;
-                ref.b = color.b;
-            }
-        }
-
-        private static _scaledAlbedo = new Color3();
         private static _scaledReflectivity = new Color3();
-        private static _scaledEmissive = new Color3();
-        private static _scaledReflection = new Color3();
-
-        public static BindLights(scene: Scene, mesh: AbstractMesh, effect: Effect, defines: MaterialDefines, useScalarInLinearSpace: boolean, maxSimultaneousLights: number, usePhysicalLightFalloff: boolean) {
-            var lightIndex = 0;
-            for (var light of mesh._lightSources) {
-                let useUbo = light._uniformBuffer.useUbo;
-                let scaledIntensity = light.getScaledIntensity();
-
-                light._uniformBuffer.bindToEffect(effect, "Light" + lightIndex);
-                MaterialHelper.BindLightProperties(light, effect, lightIndex);
-
-                // GAMMA CORRECTION.
-                this.convertColorToLinearSpaceToRef(light.diffuse, PBRMaterial._scaledAlbedo, useScalarInLinearSpace);
-
-                PBRMaterial._scaledAlbedo.scaleToRef(scaledIntensity, PBRMaterial._scaledAlbedo);
-                light._uniformBuffer.updateColor4(useUbo ? "vLightDiffuse" : "vLightDiffuse" + lightIndex, PBRMaterial._scaledAlbedo, usePhysicalLightFalloff ? light.radius : light.range);
-
-                if (defines["SPECULARTERM"]) {
-                    this.convertColorToLinearSpaceToRef(light.specular, PBRMaterial._scaledReflectivity, useScalarInLinearSpace);
-
-                    PBRMaterial._scaledReflectivity.scaleToRef(scaledIntensity, PBRMaterial._scaledReflectivity);
-                    light._uniformBuffer.updateColor3(useUbo ? "vLightSpecular" : "vLightSpecular" + lightIndex, PBRMaterial._scaledReflectivity);
-                }
-
-                // Shadows
-                if (scene.shadowsEnabled) {
-                    MaterialHelper.BindLightShadow(light, scene, mesh, lightIndex + "", effect);
-                }
-
-                light._uniformBuffer.update();
-
-                lightIndex++;
-
-                if (lightIndex === maxSimultaneousLights)
-                    break;
-            }
-        }
 
 
         public isReadyForSubMesh(mesh: AbstractMesh, subMesh: SubMesh, useInstances?: boolean): boolean { 
             if (this.isFrozen) {
             if (this.isFrozen) {
@@ -547,8 +507,6 @@
                         defines.LODBASEDMICROSFURACE = true;
                         defines.LODBASEDMICROSFURACE = true;
                     }
                     }
 
 
-
                     if (this._albedoTexture && StandardMaterial.DiffuseTextureEnabled) {
                     if (this._albedoTexture && StandardMaterial.DiffuseTextureEnabled) {
                         if (!this._albedoTexture.isReadyOrNotBlocking()) {
                         if (!this._albedoTexture.isReadyOrNotBlocking()) {
                             return false;
                             return false;
@@ -741,79 +699,45 @@
                             }
                             }
                         }
                     }
-                
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        // Color Grading texure can not be none blocking.
-                        if (!this._cameraColorGradingTexture.isReady()) {
-                            return false;
-                        }
-                        
-                        defines.CAMERACOLORGRADING = true;
-                    }
-
-                    if (!this.backFaceCulling && this._twoSidedLighting) {
-                        defines.TWOSIDEDLIGHTING = true;
-                    }
 
 
                     if (this._shouldUseAlphaFromAlbedoTexture()) {
                         defines.ALPHAFROMALBEDO = true;
                         defines.ALPHAFROMALBEDO = true;
                     }
                     }
 
 
-                        defines.EMISSIVEASILLUMINATION = true;
-                    }
-
-                    if (this._cameraContrast != 1) {
-                        defines.CAMERACONTRAST = true;
-                    }
-
-                    if (this._cameraExposure != 1) {
-                        defines.CAMERATONEMAP = true;
-                    }
-                    
-                    if (this._cameraColorCurves) {
-                        defines.CAMERACOLORCURVES = true;
-                    }
-
-                    if (this._useSpecularOverAlpha) {
-                        defines.SPECULAROVERALPHA = true;
-                    }
-
-                    if (this._usePhysicalLightFalloff) {
-                        defines.USEPHYSICALLIGHTFALLOFF = true;
-                    }
+                }
 
-                    if (this._useRadianceOverAlpha) {
-                        defines.RADIANCEOVERALPHA = true;
-                    }
+                if (this._useSpecularOverAlpha) {
+                    defines.SPECULAROVERALPHA = true;
+                }
 
-                    if ((this._metallic !== undefined && this._metallic !== null) || (this._roughness !== undefined && this._roughness !== null)) {
-                        defines.METALLICWORKFLOW = true;
-                    }   
+                if (this._usePhysicalLightFalloff) {
+                    defines.USEPHYSICALLIGHTFALLOFF = true;
+                }
 
-                    defines.ALPHATESTVALUE = this._alphaCutOff;
-                    defines.ALPHABLEND = this.needAlphaBlending();
+                if (this._useRadianceOverAlpha) {
+                    defines.RADIANCEOVERALPHA = true;
                 }
-            }
 
-            if (defines._areFresnelDirty) {
-                if (StandardMaterial.FresnelEnabled) {
-                    // Fresnel
-                    if (this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled ||
-                        this._emissiveFresnelParameters && this._emissiveFresnelParameters.isEnabled) {
+                if ((this._metallic !== undefined && this._metallic !== null) || (this._roughness !== undefined && this._roughness !== null)) {
+                    defines.METALLICWORKFLOW = true;
+                }
 
-                        if (this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled) {
-                            defines.OPACITYFRESNEL = true;
-                        }
+                if (!this.backFaceCulling && this._twoSidedLighting) {
+                    defines.TWOSIDEDLIGHTING = true;
+                }
 
-                        if (this._emissiveFresnelParameters && this._emissiveFresnelParameters.isEnabled) {
-                            defines.EMISSIVEFRESNEL = true;
-                        }
+                defines.ALPHATESTVALUE = this._alphaCutOff;
+                defines.HDRLINEAROUTPUT = this._hdrLinearOutput;
+                defines.PREMULTIPLYALPHA = this._premultiplyAlpha;
+                defines.ALPHABLEND = this.needAlphaBlending();
+                defines.ALPHAFRESNEL = this._useAlphaFresnel;
 
-                        defines.FRESNEL = true;
-                    }
+                if (!this._imageProcessingConfiguration.isReady()) {
+                    return false;
                 }
-            }            
+
+                this._imageProcessingConfiguration.prepareDefines(defines);
+            }
 
             // Misc.
             MaterialHelper.PrepareDefinesForMisc(mesh, scene, this._useLogarithmicDepth, this.pointsCloud, this.fogEnabled, defines);
@@ -831,7 +755,7 @@
                 }
             }
 
-            // Get correct effect      
+            // Get correct effect
             if (defines.isDirty) {
                 defines.markAsProcessed();
                 scene.resetCachedMaterial();
@@ -884,18 +808,6 @@
                     fallbacks.addFallback(0, "SPECULARTERM");
                 }
 
-                if (defines.OPACITYFRESNEL) {
-                    fallbacks.addFallback(1, "OPACITYFRESNEL");
-                }
-
-                if (defines.EMISSIVEFRESNEL) {
-                    fallbacks.addFallback(2, "EMISSIVEFRESNEL");
-                }
-
-                if (defines.FRESNEL) {
-                    fallbacks.addFallback(3, "FRESNEL");
-                }
-
                 if (defines.NUM_BONE_INFLUENCERS > 0) {
                     fallbacks.addCPUSkinningFallback(0, mesh);
                 }
@@ -932,25 +844,20 @@
                        "vAlbedoInfos", "vAmbientInfos", "vOpacityInfos", "vReflectionInfos", "vEmissiveInfos", "vReflectivityInfos", "vMicroSurfaceSamplerInfos", "vBumpInfos", "vLightmapInfos", "vRefractionInfos",
                        "mBones",
                        "vClipPlane", "albedoMatrix", "ambientMatrix", "opacityMatrix", "reflectionMatrix", "emissiveMatrix", "reflectivityMatrix", "microSurfaceSamplerMatrix", "bumpMatrix", "lightmapMatrix", "refractionMatrix",
-                        "opacityParts", "emissiveLeftColor", "emissiveRightColor",
                        "vLightingIntensity",
                        "logarithmicDepthConstant",
                        "vSphericalX", "vSphericalY", "vSphericalZ",
                        "vSphericalXX", "vSphericalYY", "vSphericalZZ",
                        "vSphericalXY", "vSphericalYZ", "vSphericalZX",
-                        "vMicrosurfaceTextureLods",
-                        "vCameraInfos"
+                        "vMicrosurfaceTextureLods"
                ];

                var samplers = ["albedoSampler", "ambientSampler", "opacitySampler", "reflectionCubeSampler", "reflection2DSampler", "emissiveSampler", "reflectivitySampler", "microSurfaceSampler", "bumpSampler", "lightmapSampler", "refractionCubeSampler", "refraction2DSampler"];
                var uniformBuffers = ["Material", "Scene"];

-                if (defines.CAMERACOLORCURVES) {
-                    ColorCurves.PrepareUniforms(uniforms);
-                }
-                if (defines.CAMERACOLORGRADING) {
-                    ColorGradingTexture.PrepareUniformsAndSamplers(uniforms, samplers);
-                }
+                ImageProcessing.PrepareUniforms(uniforms, defines);
+                ImageProcessing.PrepareSamplers(samplers, defines);
+
                MaterialHelper.PrepareUniformsAndSamplersList(<EffectCreationOptions>{
                    uniformsNames: uniforms, 
                    uniformBuffersNames: uniformBuffers,
@@ -1024,9 +931,6 @@
             this._uniformBuffer.addUniform("vMicrosurfaceTextureLods", 2);
             this._uniformBuffer.addUniform("vReflectivityColor", 4);
             this._uniformBuffer.addUniform("vEmissiveColor", 3);
-            this._uniformBuffer.addUniform("opacityParts", 4);
-            this._uniformBuffer.addUniform("emissiveLeftColor", 4);
-            this._uniformBuffer.addUniform("emissiveRightColor", 4);
 
             this._uniformBuffer.addUniform("pointSize", 1);
             this._uniformBuffer.create();
@@ -1073,18 +977,6 @@
 
                 if (!this._uniformBuffer.useUbo || !this.isFrozen || !this._uniformBuffer.isSync) {
 
-                    // Fresnel
-                    if (StandardMaterial.FresnelEnabled) {
-                        if (this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled) {
-                            this._uniformBuffer.updateColor4("opacityParts", new Color3(this._opacityFresnelParameters.leftColor.toLuminance(), this._opacityFresnelParameters.rightColor.toLuminance(), this._opacityFresnelParameters.bias), this._opacityFresnelParameters.power);
-                        }
-
-                        if (this._emissiveFresnelParameters && this._emissiveFresnelParameters.isEnabled) {
-                            this._uniformBuffer.updateColor4("emissiveLeftColor", this._emissiveFresnelParameters.leftColor, this._emissiveFresnelParameters.power);
-                            this._uniformBuffer.updateColor4("emissiveRightColor", this._emissiveFresnelParameters.rightColor, this._emissiveFresnelParameters.bias);
-                        }
-                    }
-
                     // Texture uniforms      
                     if (scene.texturesEnabled) {
                         if (this._albedoTexture && StandardMaterial.DiffuseTextureEnabled) {
@@ -1201,22 +1093,12 @@
                         this._uniformBuffer.updateColor4("vReflectivityColor", PBRMaterial._scaledReflectivity, 0);
                     }
                     else {
-                        // GAMMA CORRECTION.
-                        this.convertColorToLinearSpaceToRef(this._reflectivityColor, PBRMaterial._scaledReflectivity);
-                        this._uniformBuffer.updateColor4("vReflectivityColor", PBRMaterial._scaledReflectivity, this._microSurface);
+                        this._uniformBuffer.updateColor4("vReflectivityColor", this._reflectivityColor, this._microSurface);
                     }
 
-                    // GAMMA CORRECTION.
-                    this.convertColorToLinearSpaceToRef(this._emissiveColor, PBRMaterial._scaledEmissive);
-                    this._uniformBuffer.updateColor3("vEmissiveColor", PBRMaterial._scaledEmissive);
-
-                    // GAMMA CORRECTION.
-                    this.convertColorToLinearSpaceToRef(this._reflectionColor, PBRMaterial._scaledReflection);
-                    this._uniformBuffer.updateColor3("vReflectionColor", PBRMaterial._scaledReflection);
-
-                    // GAMMA CORRECTION.
-                    this.convertColorToLinearSpaceToRef(this._albedoColor, PBRMaterial._scaledAlbedo);
-                    this._uniformBuffer.updateColor4("vAlbedoColor", PBRMaterial._scaledAlbedo, this.alpha * mesh.visibility);
+                    this._uniformBuffer.updateColor3("vEmissiveColor", this._emissiveColor);
+                    this._uniformBuffer.updateColor3("vReflectionColor", this._reflectionColor);
+                    this._uniformBuffer.updateColor4("vAlbedoColor", this._albedoColor, this.alpha * mesh.visibility);
 
 
                     // Misc
@@ -1282,10 +1164,6 @@
                             this._uniformBuffer.setTexture("refraction2DSampler", this._refractionTexture);
                         }
                     }
-
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        ColorGradingTexture.Bind(this._cameraColorGradingTexture, this._activeEffect);
-                    }
                 }
 
                 // Clip plane
@@ -1301,7 +1179,7 @@
             if (this._mustRebind(scene, effect) || !this.isFrozen) {
                 // Lights
                 if (scene.lightsEnabled && !this._disableLighting) {
-                    PBRMaterial.BindLights(scene, mesh, this._activeEffect, defines, this._useScalarInLinearSpace, this._maxSimultaneousLights, this._usePhysicalLightFalloff);
+                    MaterialHelper.BindLights(scene, mesh, this._activeEffect, defines, this._maxSimultaneousLights, this._usePhysicalLightFalloff);
                 }
 
                 // View
@@ -1317,12 +1195,9 @@
                     MaterialHelper.BindMorphTargetParameters(mesh, this._activeEffect);
                 }
 
-                this._cameraInfos.x = this._cameraExposure;
-                this._cameraInfos.y = this._cameraContrast;
-                effect.setVector4("vCameraInfos", this._cameraInfos);
-                
-                if (this._cameraColorCurves) {
-                    ColorCurves.Bind(this._cameraColorCurves, this._activeEffect);
+                // image processing
+                if (this._imageProcessingConfiguration) {
+                    this._imageProcessingConfiguration.bind(this._activeEffect);
                 }
 
                 // Log. depth
@@ -1377,10 +1252,6 @@
             if (this._refractionTexture && this._refractionTexture.animations && this._refractionTexture.animations.length > 0) {
                 results.push(this._refractionTexture);
             }
-            
-            if (this._cameraColorGradingTexture && this._cameraColorGradingTexture.animations && this._cameraColorGradingTexture.animations.length > 0) {
-                results.push(this._cameraColorGradingTexture);
-            }
 
             return results;
         }
@@ -1426,14 +1297,14 @@
                 if (this._refractionTexture) {
                     this._refractionTexture.dispose();
                 }
-                
-                if (this._cameraColorGradingTexture) {
-                    this._cameraColorGradingTexture.dispose();
-                }
             }
 
             this._renderTargets.dispose();
 
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
             super.dispose(forceDisposeEffect, forceDisposeTextures);
         }
     }

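For reference, the image processing integration introduced above reduces to four calls on the shared configuration. The sketch below is not part of this changeset; the BABYLON member names are the ones visible in this diff (isReady, prepareDefines, PrepareUniforms, PrepareSamplers, bind, scene.imageProcessingConfiguration), while the helper function and its parameters are purely illustrative.

    // Illustrative TypeScript sketch of the calls PBRBaseMaterial now makes.
    function setupImageProcessing(scene: BABYLON.Scene,
                                  defines: BABYLON.IImageProcessingDefines,
                                  uniforms: string[],
                                  samplers: string[],
                                  effect: BABYLON.Effect): boolean {
        const config = scene.imageProcessingConfiguration;

        // 1. Readiness gate: wait for the color grading LUT (if any) before compiling.
        if (!config.isReady()) {
            return false;
        }

        // 2. Defines: toggles IMAGEPROCESSING, VIGNETTE, TONEMAPPING, CONTRAST,
        //    COLORCURVES and COLORGRADING on the material defines.
        config.prepareDefines(defines);

        // 3. Effect creation inputs: uniforms and samplers required by the active defines.
        BABYLON.ImageProcessing.PrepareUniforms(uniforms, defines);
        BABYLON.ImageProcessing.PrepareSamplers(samplers, defines);

        // 4. Per-draw bind: pushes exposure, contrast, curves and the grading LUT.
        config.bind(effect);
        return true;
    }
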
+ 0 - 2
src/Materials/PBR/babylon.pbrBaseSimpleMaterial.ts

@@ -171,9 +171,7 @@
         constructor(name: string, scene: Scene) {
             super(name, scene);
 
-            this._useEmissiveAsIllumination = true;
             this._useAmbientInGrayScale = true;
-            this._useScalarInLinearSpace = true;
         }
     }
 }

+ 157 - 59
src/Materials/PBR/babylon.pbrMaterial.ts

@@ -80,41 +80,6 @@
         public disableBumpMap: boolean = false;
 
         /**
-         * The camera exposure used on this material.
-         * This property is here and not in the camera to allow controlling exposure without full screen post process.
-         * This corresponds to a photographic exposure.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraExposure: number = 1.0;
-        
-        /**
-         * The camera contrast used on this material.
-         * This property is here and not in the camera to allow controlling contrast without full screen post process.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraContrast: number = 1.0;
-        
-        /**
-         * Color Grading 2D Lookup Texture.
-         * This allows special effects like sepia, black and white to sixties rendering style. 
-         */
-        @serializeAsTexture()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorGradingTexture: BaseTexture = null;
-        
-        /**
-         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
-         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
-         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
-         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
-         */
-        @serializeAsColorCurves()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorCurves: ColorCurves = null;
-
-        /**
          * AKA Diffuse Texture in standard nomenclature.
          * AKA Diffuse Texture in standard nomenclature.
          */
          */
         @serializeAsTexture()
         @serializeAsTexture()
@@ -244,14 +209,6 @@
         @expandToProperty("_markAllSubMeshesAsTexturesDirty")
         @expandToProperty("_markAllSubMeshesAsTexturesDirty")
         public invertRefractionY = false;
         public invertRefractionY = false;
 
 
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public opacityFresnelParameters: FresnelParameters;
-
-        @serializeAsFresnelParameters()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public emissiveFresnelParameters: FresnelParameters;
-
         /**
          * This parameters will make the material used its opacity to control how much it is refracting aginst not.
          * Materials half opaque for instance using refraction could benefit from this control.
@@ -265,14 +222,6 @@
         public useLightmapAsShadowmap = false;
         
         /**
-         * In this mode, the emissive informtaion will always be added to the lighting once.
-         * A light for instance can be thought as emissive.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public useEmissiveAsIllumination = false;
-        
-        /**
          * Secifies that the alpha is coming form the albedo channel alpha channel.
          */
         @serialize()
@@ -338,14 +287,6 @@
         public useAutoMicroSurfaceFromReflectivityMap = false;
         
         /**
-         * Allows to work with scalar in linear mode. This is definitely a matter of preferences and tools used during
-         * the creation of the material.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public useScalarInLinearSpace = false;
-        
-        /**
          * BJS is using an harcoded light falloff based on a manually sets up range.
          * In PBR, one way to represents the fallof is to use the inverse squared root algorythm.
          * This parameter can help you switch back to the BJS mode in order to create scenes using both materials.
@@ -419,6 +360,163 @@
         public twoSidedLighting = false;
 
         /**
+         * Secifies that the alpha is premultiplied before output (this enables alpha premultiplied blending).
+         * in your scene composition.
+         */
+        @serialize()
+        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
+        public premultiplyAlpha = false;
+
+        /**
+         * A fresnel is applied to the alpha of the model to ensure grazing angles edges are not alpha tested.
+         * And/Or ocllude the blended part.
+         */
+        @serialize()
+        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
+        public useAlphaFresnel = false;
+
+        /**
+         * Gets the image processing configuration used either in this material.
+         */
+        public get imageProcessingConfiguration(): ImageProcessing {
+            return this._imageProcessingConfiguration;
+        }
+
+        /**
+         * Sets the Default image processing configuration used either in the this material.
+         * 
+         * If sets to null, the scene one is in use.
+         */
+        public set imageProcessingConfiguration(value: ImageProcessing) {
+            this._attachImageProcessingConfiguration(value);
+        }
+
+        /**
+         * Gets Color curves setup used in the effect if colorCurvesEnabled is set to true .
+         */
+        public get colorCurves(): ColorCurves {
+            return this.imageProcessingConfiguration.colorCurves;
+        }
+        /**
+         * Sets Color curves setup used in the effect if colorCurvesEnabled is set to true .
+         */
+        public set colorCurves(value: ColorCurves) {
+            this.imageProcessingConfiguration.colorCurves = value;
+        }
+
+        /**
+         * Gets wether the color curves effect is enabled.
+         */
+        public get colorCurvesEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorCurvesEnabled;
+        }
+        /**
+         * Sets wether the color curves effect is enabled.
+         */
+        public set colorCurvesEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorCurvesEnabled = value;
+        }
+
+        /**
+         * Gets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public get colorGradingTexture(): BaseTexture {
+            return this.imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public set colorGradingTexture(value: BaseTexture) {
+            this.imageProcessingConfiguration.colorGradingTexture = value;
+        }
+
+        /**
+         * Gets wether the color grading effect is enabled.
+         */
+        public get colorGradingEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorGradingEnabled;
+        }
+        /**
+         * Gets wether the color grading effect is enabled.
+         */
+        public set colorGradingEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorGradingEnabled = value;
+        }
+
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public get cameraExposure(): number {
+            return this._imageProcessingConfiguration.cameraExposure;
+        };
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public set cameraExposure(value: number) {
+            this._imageProcessingConfiguration.cameraExposure = value;
+        };
+        
+        /**
+         * Gets The camera contrast used on this material.
+         */
+        public get cameraContrast(): number {
+            return this._imageProcessingConfiguration.cameraContrast;
+        }
+
+        /**
+         * Sets The camera contrast used on this material.
+         */
+        public set cameraContrast(value: number) {
+            this._imageProcessingConfiguration.cameraContrast = value;
+        }
+        
+        /**
+         * Gets the Color Grading 2D Lookup Texture.
+         */
+        public get cameraColorGradingTexture(): BaseTexture {
+            return this._imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets the Color Grading 2D Lookup Texture.
+         */
+        public set cameraColorGradingTexture(value: BaseTexture) {
+            this._imageProcessingConfiguration.colorGradingTexture = value;
+        }
+
+        /**
+         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
+         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
+         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
+         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
+         */
+        public get cameraColorCurves(): ColorCurves {
+            return this._imageProcessingConfiguration.colorCurves;
+        }
+        /**
+         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
+         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
+         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
+         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
+         */
+        public set cameraColorCurves(value: ColorCurves) {
+            this._imageProcessingConfiguration.colorCurves = value;
+        }
+
+        /**
+         * If true, it allows the output of the shader to be in hdr space (e.g. more than one) which is useful
+         * in combination of post process in float or half float mode.
+         * 
+         * This also disable the image procesing that require to be applied separately.
+         */
+        @serialize()
+        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
+        public hdrLinearOutput = false;
+
+        /**
          * Instantiates a new PBRMaterial instance.
          * 
          * @param name The material name

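A hedged usage sketch of the reworked public surface above (not part of this changeset): the camera* properties now forward to the shared image processing configuration, and hdrLinearOutput replaces the removed ldrOutput flag with inverted meaning. The engine setup and LUT path below are placeholders.

    // Assumes a <canvas> element on the page; the LUT URL is a placeholder.
    const canvas = document.getElementsByTagName("canvas")[0];
    const engine = new BABYLON.Engine(canvas, true);
    const scene = new BABYLON.Scene(engine);

    const pbr = new BABYLON.PBRMaterial("pbr", scene);

    // These setters now proxy to the image processing configuration
    // instead of driving per-material CAMERA* defines.
    pbr.cameraExposure = 1.2;
    pbr.cameraContrast = 1.1;
    pbr.cameraColorCurves = new BABYLON.ColorCurves();
    pbr.cameraColorGradingTexture = new BABYLON.ColorGradingTexture("textures/lut.3dl", scene);

    // New output controls replacing the removed ldrOutput flag.
    pbr.hdrLinearOutput = false;    // keep tone mapped LDR output
    pbr.premultiplyAlpha = false;
    pbr.useAlphaFresnel = false;
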
+ 0 - 42
src/Materials/Textures/babylon.colorGradingTexture.ts

@@ -186,48 +186,6 @@ module BABYLON {
             }
         }
 
-         /**
-         * Binds the color grading to the shader.
-         * @param colorGrading The texture to bind
-         * @param effect The effect to bind to
-         */
-        public static Bind(colorGrading: BaseTexture, effect: Effect) : void {
-            effect.setTexture("cameraColorGrading2DSampler", colorGrading);
-                        
-     	    let x = colorGrading.level;                 // Texture Level
-            let y = colorGrading.getSize().height;      // Texture Size example with 8
-            let z = y - 1.0;                    // SizeMinusOne 8 - 1
-            let w = 1 / y;                      // Space of 1 slice 1 / 8
-            
-            effect.setFloat4("vCameraColorGradingInfos", x, y, z, w);
-            
-            let slicePixelSizeU = w / y;    // Space of 1 pixel in U direction, e.g. 1/64
-            let slicePixelSizeV = w;		// Space of 1 pixel in V direction, e.g. 1/8					    // Space of 1 pixel in V direction, e.g. 1/8
-            
-            let x2 = z * slicePixelSizeU;   // Extent of lookup range in U for a single slice so that range corresponds to (size-1) texels, for example 7/64
-            let y2 = z / y;	                // Extent of lookup range in V for a single slice so that range corresponds to (size-1) texels, for example 7/8
-            let z2 = 0.5 * slicePixelSizeU;	// Offset of lookup range in U to align sample position with texel centre, for example 0.5/64 
-            let w2 = 0.5 * slicePixelSizeV;	// Offset of lookup range in V to align sample position with texel centre, for example 0.5/8
-            
-            effect.setFloat4("vCameraColorGradingScaleOffset", x2, y2, z2, w2);
-        }
-        
-        /**
-         * Prepare the list of uniforms associated with the ColorGrading effects.
-         * @param uniformsList The list of uniforms used in the effect
-         * @param samplersList The list of samplers used in the effect
-         */
-        public static PrepareUniformsAndSamplers(uniformsList: string[], samplersList: string[]): void {
-            uniformsList.push(
-                "vCameraColorGradingInfos", 
-                "vCameraColorGradingScaleOffset"
-            );
-
-            samplersList.push(
-                "cameraColorGrading2DSampler"
-            );
-        }
-
         /**
          * Parses a color grading texture serialized by Babylon.
          * @param parsedTexture The texture information being parsedTexture

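The Bind helper removed above computed the LUT lookup uniforms inline; after this commit the equivalent binding is expected to go through the shared image processing configuration. As a reference, a worked example (illustrative only) of the values it produced for the 64x8 lookup texture used in its own comments:

    // vCameraColorGradingInfos = (level, size, size - 1, 1 / size)
    const level = 1.0;                 // colorGrading.level
    const size = 8;                    // colorGrading.getSize().height
    const sizeMinusOne = size - 1;     // 7
    const sliceSize = 1 / size;        // 1/8 -> (1.0, 8, 7, 0.125)

    // vCameraColorGradingScaleOffset = (scaleU, scaleV, offsetU, offsetV)
    const slicePixelSizeU = sliceSize / size;        // 1/64
    const slicePixelSizeV = sliceSize;               // 1/8
    const scaleU = sizeMinusOne * slicePixelSizeU;   // 7/64   = 0.109375
    const scaleV = sizeMinusOne / size;              // 7/8    = 0.875
    const offsetU = 0.5 * slicePixelSizeU;           // 0.5/64 = 0.0078125
    const offsetV = 0.5 * slicePixelSizeV;           // 0.5/8  = 0.0625
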
+ 4 - 0
src/Materials/babylon.effect.ts

@@ -197,6 +197,10 @@
             return this._isReady;
         }
 
+        public getEngine(): Engine {
+            return this._engine;
+        }
+
         public getProgram(): WebGLProgram {
             return this._program;
         }

+ 357 - 0
src/Materials/babylon.imageProcessing.ts

@@ -0,0 +1,357 @@
+module BABYLON {
+    /**
+     * Interface to follow in your material defines to integrate easily the
+     * Image proccessing functions.
+     */
+    export interface IImageProcessingDefines {
+        IMAGEPROCESSING: boolean;
+        VIGNETTE: boolean;
+        VIGNETTEBLENDMODEMULTIPLY: boolean;
+        VIGNETTEBLENDMODEOPAQUE: boolean;
+        TONEMAPPING: boolean;
+        CONTRAST: boolean;
+        COLORCURVES: boolean;
+        COLORGRADING: boolean;
+    }
+
+    /**
+     * Groups together the common properties used for image processing, either in a direct forward pass
+     * or through a post process, depending on whether the image processing pipeline is used in your scene.
+     */
+    export class ImageProcessing {
+
+        /**
+         * Color curves setup used in the effect if colorCurvesEnabled is set to true 
+         */
+        @serializeAsColorCurves()
+        public colorCurves = new ColorCurves();
+
+        @serialize()
+        private _colorCurvesEnabled = true;
+        /**
+         * Gets whether the color curves effect is enabled.
+         */
+        public get colorCurvesEnabled(): boolean {
+            return this._colorCurvesEnabled;
+        }
+        /**
+         * Sets whether the color curves effect is enabled.
+         */
+        public set colorCurvesEnabled(value: boolean) {
+            if (this._colorCurvesEnabled === value) {
+                return;
+            }
+
+            this._colorCurvesEnabled = value;
+            this._updateParameters();
+        }
+
+        /**
+         * Color grading LUT texture used in the effect if colorGradingEnabled is set to true 
+         */
+        @serializeAsTexture()
+        public colorGradingTexture: BaseTexture;
+
+        @serialize()
+        private _colorGradingEnabled = true;
+        /**
+         * Gets whether the color grading effect is enabled.
+         */
+        public get colorGradingEnabled(): boolean {
+            return this._colorGradingEnabled;
+        }
+        /**
+         * Sets whether the color grading effect is enabled.
+         */
+        public set colorGradingEnabled(value: boolean) {
+            if (this._colorGradingEnabled === value) {
+                return;
+            }
+
+            this._colorGradingEnabled = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        protected _cameraExposure = 1.0;
+        /**
+         * Gets Camera exposure used in the effect.
+         */
+        public get cameraExposure(): number {
+            return this._cameraExposure;
+        }
+        /**
+         * Sets the camera exposure used in the effect.
+         */
+        public set cameraExposure(value: number) {
+            if (this._cameraExposure === value) {
+                return;
+            }
+
+            this._cameraExposure = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        protected _cameraContrast = 1.0;
+        /**
+         * Gets Camera contrast used in the effect.
+         */
+        public get cameraContrast(): number {
+            return this._cameraContrast;
+        }
+        /**
+         * Sets the camera contrast used in the effect.
+         */
+        public set cameraContrast(value: number) {
+            if (this._cameraContrast === value) {
+                return;
+            }
+
+            this._cameraContrast = value;
+            this._updateParameters();
+        }
+
+        /**
+         * Vignette stretch size.
+         */
+        @serialize()
+        public vignetteStretch = 0;
+
+        /**
+         * Vignette centre X Offset.
+         */
+        @serialize()
+        public vignetteCentreX = 0;
+
+        /**
+         * Vignette centre Y Offset.
+         */
+        @serialize()
+        public vignetteCentreY = 0;
+
+        /**
+         * Vignette weight or intensity of the vignette effect.
+         */
+        @serialize()
+        public vignetteWeight = 1.5;
+
+        /**
+         * Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
+         * if vignetteEnabled is set to true.
+         */
+        @serializeAsColor4()
+        public vignetteColor: BABYLON.Color4 = new BABYLON.Color4(0, 0, 0, 0);
+
+        /**
+         * Camera field of view used by the Vignette effect.
+         */
+        @serialize()
+        public cameraFov = 0.5;
+
+        @serialize()
+        private _vignetteBlendMode = ImageProcessing.VIGNETTEMODE_MULTIPLY;
+        /**
+         * Gets the vignette blend mode allowing different kind of effect.
+         */
+        public get vignetteBlendMode(): number {
+            return this._vignetteBlendMode;
+        }
+        /**
+         * Sets the vignette blend mode allowing different kind of effect.
+         */
+        public set vignetteBlendMode(value: number) {
+            if (this._vignetteBlendMode === value) {
+                return;
+            }
+
+            this._vignetteBlendMode = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        private _vignetteEnabled = true;
+        /**
+         * Gets whether the vignette effect is enabled.
+         */
+        public get vignetteEnabled(): boolean {
+            return this._vignetteEnabled;
+        }
+        /**
+         * Sets whether the vignette effect is enabled.
+         */
+        public set vignetteEnabled(value: boolean) {
+            if (this._vignetteEnabled === value) {
+                return;
+            }
+
+            this._vignetteEnabled = value;
+            this._updateParameters();
+        }
+
+        /**
+        * An event triggered when the configuration changes and the shader needs to update some of its parameters.
+        * @type {BABYLON.Observable}
+        */
+        public onUpdateParameters = new Observable<ImageProcessing>();
+
+        /**
+         * Method called each time the image processing information changes, requiring the effect to be recompiled.
+         */
+        protected _updateParameters(): void {
+            this.onUpdateParameters.notifyObservers(this);
+        }
+
+        /**
+         * Prepare the list of uniforms associated with the Image Processing effects.
+         * @param uniforms The list of uniforms used in the effect
+         * @param defines the list of defines currently in use
+         */
+        public static PrepareUniforms(uniforms: string[], defines: IImageProcessingDefines): void {
+            if (defines.CONTRAST) {
+                uniforms.push("contrast");
+            }
+            if (defines.TONEMAPPING) {
+                uniforms.push("cameraExposureLinear");
+            }
+            if (defines.COLORGRADING) {
+                uniforms.push("colorTransformSettings");
+            }
+            if (defines.VIGNETTE) {
+                uniforms.push("vInverseScreenSize");
+                uniforms.push("vignetteSettings1");
+                uniforms.push("vignetteSettings2");
+            }
+            if (defines.COLORCURVES) {
+                ColorCurves.PrepareUniforms(uniforms);
+            }
+        }
+
+        /**
+         * Prepare the list of samplers associated with the Image Processing effects.
+         * @param samplersList The list of samplers used in the effect
+         * @param defines the list of defines currently in use
+         */
+        public static PrepareSamplers(samplersList: string[], defines: IImageProcessingDefines): void {
+            if (defines.COLORGRADING) {
+                samplersList.push("txColorTransform");
+            }
+        }
+
+        /**
+         * Prepare the list of defines associated with the shader.
+         * @param defines the list of defines to complete
+         */
+        public prepareDefines(defines: IImageProcessingDefines): void {
+            defines.VIGNETTE = this.vignetteEnabled;
+            defines.VIGNETTEBLENDMODEMULTIPLY = (this.vignetteBlendMode === ImageProcessing._VIGNETTEMODE_MULTIPLY);
+            defines.VIGNETTEBLENDMODEOPAQUE = !defines.VIGNETTEBLENDMODEMULTIPLY;
+            defines.TONEMAPPING = (this.cameraExposure !== 1.0);
+            defines.CONTRAST = (this.cameraContrast !== 1.0);
+            defines.COLORCURVES = (this.colorCurvesEnabled && !!this.colorCurves);
+            defines.COLORGRADING = (this.colorGradingEnabled && !!this.colorGradingTexture);
+            defines.IMAGEPROCESSING = defines.VIGNETTE || defines.TONEMAPPING || defines.CONTRAST || defines.COLORCURVES || defines.COLORGRADING;
+        }
+
+        /**
+         * Returns true when all of the image processing information is ready.
+         */
+        public isReady() {
+            // The color grading texture can not be non-blocking.
+            return !this.colorGradingEnabled || !this.colorGradingTexture || this.colorGradingTexture.isReady();
+        }
+
+        /**
+         * Binds the image processing to the shader.
+         * @param effect The effect to bind to
+         */
+        public bind(effect: Effect, aspectRatio = 1) : void {
+            // Color Curves
+            if (this._colorCurvesEnabled) {
+                ColorCurves.Bind(this.colorCurves, effect);
+            }
+
+            // Vignette
+            if (this._vignetteEnabled) {
+                var inverseWidth = 1 / effect.getEngine().getRenderWidth();
+                var inverseHeight = 1 / effect.getEngine().getRenderHeight();
+                effect.setFloat2("vInverseScreenSize", inverseWidth, inverseHeight);
+
+                let vignetteScaleY = Math.tan(this.cameraFov * 0.5);
+                let vignetteScaleX = vignetteScaleY * aspectRatio;
+
+                let vignetteScaleGeometricMean = Math.sqrt(vignetteScaleX * vignetteScaleY);
+                vignetteScaleX = Tools.Mix(vignetteScaleX, vignetteScaleGeometricMean, this.vignetteStretch);
+                vignetteScaleY = Tools.Mix(vignetteScaleY, vignetteScaleGeometricMean, this.vignetteStretch);
+
+                effect.setFloat4("vignetteSettings1", vignetteScaleX, vignetteScaleY, -vignetteScaleX * this.vignetteCentreX, -vignetteScaleY * this.vignetteCentreY);
+
+                let vignettePower = -2.0 * this.vignetteWeight;
+                effect.setFloat4("vignetteSettings2", this.vignetteColor.r, this.vignetteColor.g, this.vignetteColor.b, vignettePower);
+            }
+
+            // Exposure
+            effect.setFloat("cameraExposureLinear", Math.pow(2.0, -this.cameraExposure) * Math.PI);
+            
+            // Contrast
+            effect.setFloat("contrast", this.cameraContrast);
+            
+            // Color transform settings
+            if (this.colorGradingTexture) {
+                effect.setTexture("txColorTransform", this.colorGradingTexture);
+                let textureSize = this.colorGradingTexture.getSize().height;
+
+                effect.setFloat4("colorTransformSettings",
+                    (textureSize - 1) / textureSize, // textureScale
+                    0.5 / textureSize, // textureOffset
+                    textureSize, // textureSize
+                    this.colorGradingTexture.level // weight
+                );
+            }
+        }
+
+        /**
+         * Clones the current image processing instance.
+         * @return The cloned image processing
+         */
+        public clone(): ImageProcessing {
+            return SerializationHelper.Clone(() => new ImageProcessing(), this);
+        }
+
+        /**
+         * Serializes the current image processing instance to a json representation.
+         * @return a JSON representation
+         */
+        public serialize(): any {
+            return SerializationHelper.Serialize(this);
+        }
+
+        /**
+         * Parses the image processing from a json representation.
+         * @param source the JSON source to parse
+         * @return The parsed image processing
+         */      
+        public static Parse(source: any) : ImageProcessing {
+            return SerializationHelper.Parse(() => new ImageProcessing(), source, null, null);
+        }
+
+        // Static constants associated with the image processing.
+        private static _VIGNETTEMODE_MULTIPLY = 0;
+        private static _VIGNETTEMODE_OPAQUE = 1;
+
+        /**
+         * Used to apply the vignette as a mix with the pixel color.
+         */
+        public static get VIGNETTEMODE_MULTIPLY(): number {
+            return this._VIGNETTEMODE_MULTIPLY;
+        }
+
+        /**
+         * Used to apply the vignette as a replacement of the pixel color.
+         */
+        public static get VIGNETTEMODE_OPAQUE(): number {
+            return this._VIGNETTEMODE_OPAQUE;
+        }
+    }
+} 
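The class above is meant to be consumed in three steps: let it toggle the shader defines, declare the matching uniforms and samplers before compiling, then bind the live values each frame. A rough sketch of that flow under those assumptions (the defines literal and the compileEffect callback below are illustrative, not part of this commit):

// Sketch only: shows the intended call order, not a full material implementation.
function setupImageProcessing(ip: BABYLON.ImageProcessing,
                              compileEffect: (defines: string, uniforms: string[], samplers: string[]) => BABYLON.Effect): BABYLON.Effect {
    // 1. Fill a defines object from the current configuration.
    const defines: BABYLON.IImageProcessingDefines = {
        IMAGEPROCESSING: false, VIGNETTE: false, VIGNETTEBLENDMODEMULTIPLY: false,
        VIGNETTEBLENDMODEOPAQUE: false, TONEMAPPING: false, CONTRAST: false,
        COLORCURVES: false, COLORGRADING: false
    };
    ip.prepareDefines(defines);

    // 2. Declare the uniforms and samplers the shared shader chunks expect.
    const uniforms: string[] = [];
    const samplers: string[] = ["textureSampler"];
    BABYLON.ImageProcessing.PrepareUniforms(uniforms, defines);
    BABYLON.ImageProcessing.PrepareSamplers(samplers, defines);

    // Turn the boolean defines into "#define XXX" lines for the effect.
    const defineString = Object.keys(defines)
        .filter(name => (<any>defines)[name])
        .map(name => "#define " + name)
        .join("\r\n");

    const effect = compileEffect(defineString, uniforms, samplers);

    // 3. Once the texture data is ready, push the actual values (typically every frame).
    if (ip.isReady()) {
        ip.bind(effect, 16 / 9); // second argument is the output aspect ratio
    }
    return effect;
}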

+ 5 - 4
src/Materials/babylon.materialHelper.ts

@@ -322,17 +322,18 @@
             light.transferToEffect(effect, lightIndex + "");
             light.transferToEffect(effect, lightIndex + "");
         }
         }
 
 
-        public static BindLights(scene: Scene, mesh: AbstractMesh, effect: Effect, defines: MaterialDefines, maxSimultaneousLights = 4) {
+        public static BindLights(scene: Scene, mesh: AbstractMesh, effect: Effect, defines: MaterialDefines, maxSimultaneousLights = 4, usePhysicalLightFalloff = false) {
             var lightIndex = 0;
             var lightIndex = 0;
             for (var light of mesh._lightSources) {
             for (var light of mesh._lightSources) {
+                let scaledIntensity = light.getScaledIntensity();
                 light._uniformBuffer.bindToEffect(effect, "Light" + lightIndex);
                 light._uniformBuffer.bindToEffect(effect, "Light" + lightIndex);
 
 
                 MaterialHelper.BindLightProperties(light, effect, lightIndex);
                 MaterialHelper.BindLightProperties(light, effect, lightIndex);
 
 
-                light.diffuse.scaleToRef(light.intensity, Tmp.Color3[0]);
-                light._uniformBuffer.updateColor4("vLightDiffuse", Tmp.Color3[0], light.range, lightIndex + "");
+                light.diffuse.scaleToRef(scaledIntensity, Tmp.Color3[0]);
+                light._uniformBuffer.updateColor4("vLightDiffuse", Tmp.Color3[0], usePhysicalLightFalloff ? light.radius : light.range, lightIndex + "");
                 if (defines["SPECULARTERM"]) {
                 if (defines["SPECULARTERM"]) {
-                    light.specular.scaleToRef(light.intensity, Tmp.Color3[1]);
+                    light.specular.scaleToRef(scaledIntensity, Tmp.Color3[1]);
                     light._uniformBuffer.updateColor3("vLightSpecular", Tmp.Color3[1], lightIndex + "");
                     light._uniformBuffer.updateColor3("vLightSpecular", Tmp.Color3[1], lightIndex + "");
                 }
                 }
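With the extra flag, a physically based material can ask for the photometrically scaled intensity and have the light radius (instead of the range) packed into vLightDiffuse.a. A hedged sketch of a caller, assuming the scene, mesh, effect and defines already exist (only the BindLights signature comes from this commit):

// Sketch: forward the physical falloff flag when binding lights for a PBR-style material.
function bindLightsForPbr(scene: BABYLON.Scene, mesh: BABYLON.AbstractMesh,
                          effect: BABYLON.Effect, defines: BABYLON.MaterialDefines,
                          maxSimultaneousLights: number): void {
    // true -> pack light.radius (physical falloff) instead of light.range into vLightDiffuse.a
    BABYLON.MaterialHelper.BindLights(scene, mesh, effect, defines, maxSimultaneousLights, true);
}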
 
 

+ 192 - 50
src/Materials/babylon.standardMaterial.ts

@@ -1,5 +1,5 @@
 module BABYLON {
 module BABYLON {
-   export class StandardMaterialDefines extends MaterialDefines {
+   export class StandardMaterialDefines extends MaterialDefines implements IImageProcessingDefines {
         public DIFFUSE = false;
         public DIFFUSE = false;
         public AMBIENT = false;
         public AMBIENT = false;
         public OPACITY = false;
         public OPACITY = false;
@@ -57,14 +57,21 @@ module BABYLON {
         public INVERTNORMALMAPY = false;
         public INVERTNORMALMAPY = false;
         public TWOSIDEDLIGHTING = false;
         public TWOSIDEDLIGHTING = false;
         public SHADOWFLOAT = false;
         public SHADOWFLOAT = false;
-        public CAMERACOLORGRADING = false;
-        public CAMERACOLORCURVES = false;
         public MORPHTARGETS = false;
         public MORPHTARGETS = false;
         public MORPHTARGETS_NORMAL = false;
         public MORPHTARGETS_NORMAL = false;
         public MORPHTARGETS_TANGENT = false;
         public MORPHTARGETS_TANGENT = false;
         public NUM_MORPH_INFLUENCERS = 0;
         public NUM_MORPH_INFLUENCERS = 0;
         public USERIGHTHANDEDSYSTEM = false;
         public USERIGHTHANDEDSYSTEM = false;
 
 
+        public IMAGEPROCESSING = false;
+        public VIGNETTE = false;
+        public VIGNETTEBLENDMODEMULTIPLY = false;
+        public VIGNETTEBLENDMODEOPAQUE = false;
+        public TONEMAPPING = false;
+        public CONTRAST = false;
+        public COLORCURVES = false;
+        public COLORGRADING = false;
+
         constructor() {
         constructor() {
             super();
             super();
             this.rebuild();
             this.rebuild();
@@ -271,13 +278,157 @@ module BABYLON {
         public twoSidedLighting: boolean;     
         public twoSidedLighting: boolean;     
 
 
         /**
         /**
-         * Color Grading 2D Lookup Texture.
-         * This allows special effects like sepia, black and white to sixties rendering style. 
+         * Default configuration related to image processing available in the standard Material.
          */
          */
-        @serializeAsTexture("cameraColorGradingTexture")
-        private _cameraColorGradingTexture: BaseTexture;
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorGradingTexture: BaseTexture;             
+        protected _imageProcessingConfiguration: ImageProcessing;
+
+        /**
+         * Gets the image processing configuration used for this material.
+         */
+        public get imageProcessingConfiguration(): ImageProcessing {
+            return this._imageProcessingConfiguration;
+        }
+
+        /**
+         * Sets the image processing configuration used for this material.
+         * 
+         * If set to null, the scene's configuration is used instead.
+         */
+        public set imageProcessingConfiguration(value: ImageProcessing) {
+            this._attachImageProcessingConfiguration(value);
+        }
+
+        /**
+         * Keeps track of the image processing observer so it can be detached and replaced.
+         */
+        private _imageProcessingObserver: Observer<ImageProcessing>;
+
+        /**
+         * Attaches a new image processing configuration to the Standard Material.
+         * @param configuration 
+         */
+        protected _attachImageProcessingConfiguration(configuration: ImageProcessing): void {
+            if (configuration === this._imageProcessingConfiguration) {
+                return;
+            }
+
+            // Detaches observer.
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
+            // Pick the scene configuration if needed.
+            if (!configuration) {
+                this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration;
+            }
+            else {
+                this._imageProcessingConfiguration = configuration;
+            }
+
+            // Attaches observer.
+            this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(conf => {
+                this._markAllSubMeshesAsTexturesDirty();
+            });
+
+            // Ensure the effect will be rebuilt.
+            this._markAllSubMeshesAsTexturesDirty();
+        }
+
+        /**
+         * Gets Color curves setup used in the effect if colorCurvesEnabled is set to true.
+         */
+        public get colorCurves(): ColorCurves {
+            return this.imageProcessingConfiguration.colorCurves;
+        }
+        /**
+         * Sets Color curves setup used in the effect if colorCurvesEnabled is set to true.
+         */
+        public set colorCurves(value: ColorCurves) {
+            this.imageProcessingConfiguration.colorCurves = value;
+        }
+
+        /**
+         * Gets whether the color curves effect is enabled.
+         */
+        public get colorCurvesEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorCurvesEnabled;
+        }
+        /**
+         * Sets whether the color curves effect is enabled.
+         */
+        public set colorCurvesEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorCurvesEnabled = value;
+        }
+
+        /**
+         * Gets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public get colorGradingTexture(): BaseTexture {
+            return this.imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public set colorGradingTexture(value: BaseTexture) {
+            this.imageProcessingConfiguration.colorGradingTexture = value;
+        }
+
+        /**
+         * Gets whether the color grading effect is enabled.
+         */
+        public get colorGradingEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorGradingEnabled;
+        }
+        /**
+         * Sets whether the color grading effect is enabled.
+         */
+        public set colorGradingEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorGradingEnabled = value;
+        }
+
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public get cameraExposure(): number {
+            return this._imageProcessingConfiguration.cameraExposure;
+        };
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public set cameraExposure(value: number) {
+            this._imageProcessingConfiguration.cameraExposure = value;
+        };
+        
+        /**
+         * Gets The camera contrast used on this material.
+         */
+        public get cameraContrast(): number {
+            return this._imageProcessingConfiguration.cameraContrast;
+        }
+
+        /**
+         * Sets The camera contrast used on this material.
+         */
+        public set cameraContrast(value: number) {
+            this._imageProcessingConfiguration.cameraContrast = value;
+        }
+        
+        /**
+         * Gets the Color Grading 2D Lookup Texture.
+         */
+        public get cameraColorGradingTexture(): BaseTexture {
+            return this._imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets the Color Grading 2D Lookup Texture.
+         */
+        public set cameraColorGradingTexture(value: BaseTexture) {
+            this._imageProcessingConfiguration.colorGradingTexture = value;
+        }
 
 
         /**
         /**
          * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). 
          * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). 
@@ -285,10 +436,18 @@ module BABYLON {
          * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
          * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
          * corresponding to low luminance, medium luminance, and high luminance areas respectively.
          * corresponding to low luminance, medium luminance, and high luminance areas respectively.
          */
          */
-        @serializeAsColorCurves("cameraColorCurves")
-        private _cameraColorCurves: ColorCurves = null;
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorCurves: ColorCurves;             
+        public get cameraColorCurves(): ColorCurves {
+            return this._imageProcessingConfiguration.colorCurves;
+        }
+        /**
+         * The color grading curves provide additional color adjustment that is applied after any color grading transform (3D LUT). 
+         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
+         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
+         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
+         */
+        public set cameraColorCurves(value: ColorCurves) {
+            this._imageProcessingConfiguration.colorCurves = value;
+        }
 
 
         public customShaderNameResolve: (shaderName: string, uniforms: string[], uniformBuffers: string[], samplers: string[], defines: StandardMaterialDefines) => string;
         public customShaderNameResolve: (shaderName: string, uniforms: string[], uniformBuffers: string[], samplers: string[], defines: StandardMaterialDefines) => string;
 
 
@@ -301,6 +460,9 @@ module BABYLON {
         constructor(name: string, scene: Scene) {
         constructor(name: string, scene: Scene) {
             super(name, scene);
             super(name, scene);
 
 
+            // Default to the scene's image processing configuration.
+            this._attachImageProcessingConfiguration(null);
+
             this.getRenderTargetTextures = (): SmartArray<RenderTargetTexture> => {
             this.getRenderTargetTextures = (): SmartArray<RenderTargetTexture> => {
                 this._renderTargets.reset();
                 this._renderTargets.reset();
 
 
@@ -525,17 +687,6 @@ module BABYLON {
                         defines.REFRACTION = false;
                         defines.REFRACTION = false;
                     }
                     }
 
 
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        // Camera Color Grading can not be none blocking.
-                        if (!this._cameraColorGradingTexture.isReady()) {
-                            return false;
-                        } else {
-                            defines.CAMERACOLORGRADING = true;
-                        }
-                    } else {
-                        defines.CAMERACOLORGRADING = false;
-                    }
-
                     defines.TWOSIDEDLIGHTING = !this._backFaceCulling && this._twoSidedLighting;
                     defines.TWOSIDEDLIGHTING = !this._backFaceCulling && this._twoSidedLighting;
                 } else {
                 } else {
                     defines.DIFFUSE = false;
                     defines.DIFFUSE = false;
@@ -546,10 +697,13 @@ module BABYLON {
                     defines.LIGHTMAP = false;
                     defines.LIGHTMAP = false;
                     defines.BUMP = false;
                     defines.BUMP = false;
                     defines.REFRACTION = false;
                     defines.REFRACTION = false;
-                    defines.CAMERACOLORGRADING = false;
                 }
                 }
 
 
-                defines.CAMERACOLORCURVES = (this._cameraColorCurves !== undefined && this._cameraColorCurves !== null);
+                if (!this.imageProcessingConfiguration.isReady()) {
+                    return false;
+                }
+
+                this.imageProcessingConfiguration.prepareDefines(defines);
 
 
                 defines.ALPHAFROMDIFFUSE = this._shouldUseAlphaFromDiffuseTexture();
                 defines.ALPHAFROMDIFFUSE = this._shouldUseAlphaFromDiffuseTexture();
 
 
@@ -558,7 +712,7 @@ module BABYLON {
                 defines.LINKEMISSIVEWITHDIFFUSE = this._linkEmissiveWithDiffuse;       
                 defines.LINKEMISSIVEWITHDIFFUSE = this._linkEmissiveWithDiffuse;       
 
 
                 defines.SPECULAROVERALPHA = this._useSpecularOverAlpha;
                 defines.SPECULAROVERALPHA = this._useSpecularOverAlpha;
-            } 
+            }
 
 
             if (defines._areFresnelDirty) {
             if (defines._areFresnelDirty) {
                 if (StandardMaterial.FresnelEnabled) {
                 if (StandardMaterial.FresnelEnabled) {
@@ -709,12 +863,9 @@ module BABYLON {
 
 
                 var uniformBuffers = ["Material", "Scene"];
                 var uniformBuffers = ["Material", "Scene"];
 
 
-                if (defines.CAMERACOLORCURVES) {
-                    ColorCurves.PrepareUniforms(uniforms);
-                }
-                if (defines.CAMERACOLORGRADING) {
-                    ColorGradingTexture.PrepareUniformsAndSamplers(uniforms, samplers);
-                }
+                ImageProcessing.PrepareUniforms(uniforms, defines);
+                ImageProcessing.PrepareSamplers(samplers, defines);
+
                 MaterialHelper.PrepareUniformsAndSamplersList(<EffectCreationOptions>{
                 MaterialHelper.PrepareUniformsAndSamplersList(<EffectCreationOptions>{
                     uniformsNames: uniforms, 
                     uniformsNames: uniforms, 
                     uniformBuffersNames: uniformBuffers,
                     uniformBuffersNames: uniformBuffers,
@@ -970,10 +1121,6 @@ module BABYLON {
                             effect.setTexture("refraction2DSampler", this._refractionTexture);
                             effect.setTexture("refraction2DSampler", this._refractionTexture);
                         }
                         }
                     }
                     }
-                    
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        ColorGradingTexture.Bind(this._cameraColorGradingTexture, effect);
-                    }
                 }
                 }
 
 
                 // Clip plane
                 // Clip plane
@@ -1002,17 +1149,16 @@ module BABYLON {
 
 
                 // Morph targets
                 // Morph targets
                 if (defines.NUM_MORPH_INFLUENCERS) {
                 if (defines.NUM_MORPH_INFLUENCERS) {
-                    MaterialHelper.BindMorphTargetParameters(mesh, effect);                
+                    MaterialHelper.BindMorphTargetParameters(mesh, effect);
                 }
                 }
 
 
                 // Log. depth
                 // Log. depth
                 MaterialHelper.BindLogDepth(defines, effect, scene);
                 MaterialHelper.BindLogDepth(defines, effect, scene);
 
 
-                // Color Curves
-                if (this._cameraColorCurves) {
-                    ColorCurves.Bind(this._cameraColorCurves, effect);
+                // image processing
+                if (this.imageProcessingConfiguration) {
+                    this.imageProcessingConfiguration.bind(this._activeEffect);
                 }
                 }
-
             }
             }
 
 
             this._uniformBuffer.update();
             this._uniformBuffer.update();
@@ -1057,10 +1203,6 @@ module BABYLON {
             if (this._refractionTexture && this._refractionTexture.animations && this._refractionTexture.animations.length > 0) {
             if (this._refractionTexture && this._refractionTexture.animations && this._refractionTexture.animations.length > 0) {
                 results.push(this._refractionTexture);
                 results.push(this._refractionTexture);
             }
             }
-            
-            if (this._cameraColorGradingTexture && this._cameraColorGradingTexture.animations && this._cameraColorGradingTexture.animations.length > 0) {
-                results.push(this._cameraColorGradingTexture);
-            }
 
 
             return results;
             return results;
         }
         }
@@ -1102,10 +1244,10 @@ module BABYLON {
                 if (this._refractionTexture) {
                 if (this._refractionTexture) {
                     this._refractionTexture.dispose();
                     this._refractionTexture.dispose();
                 }
                 }
-                
-                if (this._cameraColorGradingTexture) {
-                    this._cameraColorGradingTexture.dispose();
-                }
+            }
+
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
             }
             }
 
 
             super.dispose(forceDisposeEffect, forceDisposeTextures);
             super.dispose(forceDisposeEffect, forceDisposeTextures);
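Because every legacy camera* property on StandardMaterial now forwards to the attached configuration, a tweak made through the material, through the shared scene configuration or through the post process all end up in the same place. A small usage sketch under that assumption (the function and values are illustrative only):

// Sketch: the legacy camera* properties now proxy the attached ImageProcessing configuration.
function tweakMaterialImageProcessing(material: BABYLON.StandardMaterial): void {
    material.cameraExposure = 1.4;                           // forwards to imageProcessingConfiguration.cameraExposure
    material.cameraContrast = 1.2;                           // forwards to imageProcessingConfiguration.cameraContrast
    material.cameraColorCurves = new BABYLON.ColorCurves();  // forwards to imageProcessingConfiguration.colorCurves

    // By default the material is attached to the scene-wide configuration,
    // so other materials sharing it pick up the same values.
    const shared = material.imageProcessingConfiguration;
    shared.vignetteEnabled = false;
}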

+ 257 - 148
src/PostProcess/babylon.imageProcessingPostProcess.ts

@@ -1,82 +1,260 @@
 module BABYLON {
 module BABYLON {
     export class ImageProcessingPostProcess extends PostProcess {
     export class ImageProcessingPostProcess extends PostProcess {
-		private _colorGradingTexture: BaseTexture;
-		public colorGradingWeight: number = 1.0;
-		public colorCurves = new ColorCurves();
-        private _colorCurvesEnabled = true;
 
 
-        public cameraFov = 0.5;
+        /**
+         * Default configuration related to image processing available in this post process.
+         */
+        protected _imageProcessingConfiguration: ImageProcessing;
 
 
-		public vignetteStretch = 0;
-		public vignetteCentreX = 0;
-		public vignetteCentreY = 0;
-		public vignetteWeight = 1.5;
-		public vignetteColor: BABYLON.Color4 = new BABYLON.Color4(0, 0, 0, 0);
-		private _vignetteBlendMode = ImageProcessingPostProcess.VIGNETTEMODE_MULTIPLY;
-        private _vignetteEnabled = true;
-
-		public cameraContrast = 1.0;
-		public cameraExposure = 1.68;
-		private _cameraToneMappingEnabled = true;
-
-        private _fromLinearSpace = false;
+        /**
+         * Gets the image processing configuration used by this post process.
+         */
+        public get imageProcessingConfiguration(): ImageProcessing {
+            return this._imageProcessingConfiguration;
+        }
 
 
-        public get colorGradingTexture(): BaseTexture {
-            return this._colorGradingTexture;
+        /**
+         * Sets the image processing configuration used by this post process.
+         * 
+         * If set to null, the scene's configuration is used instead.
+         */
+        public set imageProcessingConfiguration(value: ImageProcessing) {
+            this._attachImageProcessingConfiguration(value);
         }
         }
 
 
-        public set colorGradingTexture(value: BaseTexture) {
-            if (this._colorGradingTexture === value) {
+        /**
+         * Keeps track of the image processing observer so it can be detached and replaced.
+         */
+        private _imageProcessingObserver: Observer<ImageProcessing>;
+
+        /**
+         * Attaches a new image processing configuration to this post process.
+         * @param configuration 
+         */
+        protected _attachImageProcessingConfiguration(configuration: ImageProcessing): void {
+            if (configuration === this._imageProcessingConfiguration) {
                 return;
                 return;
             }
             }
 
 
-            this._colorGradingTexture = value;
+            // Detaches observer.
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
+            // Pick the scene configuration if needed.
+            if (!configuration) {
+                var camera = this.getCamera();
+                var scene = camera ? camera.getScene() : BABYLON.Engine.LastCreatedScene;
+                this._imageProcessingConfiguration = scene.imageProcessingConfiguration;
+            }
+            else {
+                this._imageProcessingConfiguration = configuration;
+            }
+
+            // Attaches observer.
+            this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(conf => {
+                this._updateParameters();
+            });
+
+            // Ensure the effect will be rebuilt.
             this._updateParameters();
             this._updateParameters();
         }
         }
 
 
-        public get vignetteBlendMode(): number {
-            return this._vignetteBlendMode;
+        /**
+         * Gets Color curves setup used in the effect if colorCurvesEnabled is set to true.
+         */
+        public get colorCurves(): ColorCurves {
+            return this.imageProcessingConfiguration.colorCurves;
+        }
+        /**
+         * Sets Color curves setup used in the effect if colorCurvesEnabled is set to true.
+         */
+        public set colorCurves(value: ColorCurves) {
+            this.imageProcessingConfiguration.colorCurves = value;
         }
         }
 
 
-        public set vignetteBlendMode(value: number) {
-            if (this._vignetteBlendMode === value) {
-                return;
-            }
+        /**
+         * Gets whether the color curves effect is enabled.
+         */
+        public get colorCurvesEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorCurvesEnabled;
+        }
+        /**
+         * Sets whether the color curves effect is enabled.
+         */
+        public set colorCurvesEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorCurvesEnabled = value;
+        }
 
 
-            this._vignetteBlendMode = value;
-            this._updateParameters();
-        }  
+        /**
+         * Gets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public get colorGradingTexture(): BaseTexture {
+            return this.imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public set colorGradingTexture(value: BaseTexture) {
+            this.imageProcessingConfiguration.colorGradingTexture = value;
+        }
 
 
-        public get colorCurvesEnabled(): boolean {
-            return this._colorCurvesEnabled;
+        /**
+         * Gets whether the color grading effect is enabled.
+         */
+        public get colorGradingEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorGradingEnabled;
+        }
+        /**
+         * Sets whether the color grading effect is enabled.
+         */
+        public set colorGradingEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorGradingEnabled = value;
         }
         }
 
 
-        public set colorCurvesEnabled(value: boolean) {
-            if (this._colorCurvesEnabled === value) {
-                return;
-            }
+        /**
+         * Gets Camera exposure used in the effect.
+         */
+        public get cameraExposure(): number {
+            return this.imageProcessingConfiguration.cameraExposure;
+        }
+        /**
+         * Sets Camera exposure used in the effect.
+         */
+        public set cameraExposure(value: number) {
+            this.imageProcessingConfiguration.cameraExposure = value;
+        }
 
 
-            this._colorCurvesEnabled = value;
-            this._updateParameters();
-        }           
+        /**
+         * Gets Camera contrast used in the effect.
+         */
+        public get cameraContrast(): number {
+            return this.imageProcessingConfiguration.cameraContrast;
+        }
+        /**
+         * Sets Camera contrast used in the effect.
+         */
+        public set cameraContrast(value: number) {
+            this.imageProcessingConfiguration.cameraContrast = value;
+        }
 
 
-        public get vignetteEnabled(): boolean {
-            return this._vignetteEnabled;
+        /**
+         * Gets Vignette stretch size.
+         */
+        public get vignetteStretch(): number {
+            return this.imageProcessingConfiguration.vignetteStretch;
+        }
+        /**
+         * Sets Vignette stretch size.
+         */
+        public set vignetteStretch(value: number) {
+            this.imageProcessingConfiguration.vignetteStretch = value;
         }
         }
 
 
-        public set vignetteEnabled(value: boolean) {
-            if (this._vignetteEnabled === value) {
-                return;
-            }
+        /**
+         * Gets Vignette centre X Offset.
+         */
+        public get vignetteCentreX(): number {
+            return this.imageProcessingConfiguration.vignetteCentreX;
+        }
+        /**
+         * Sets Vignette centre X Offset.
+         */
+        public set vignetteCentreX(value: number) {
+            this.imageProcessingConfiguration.vignetteCentreX = value;
+        }
 
 
-            this._vignetteEnabled = value;
-            this._updateParameters();
-        }      
+        /**
+         * Gets Vignette centre Y Offset.
+         */
+        public get vignetteCentreY(): number {
+            return this.imageProcessingConfiguration.vignetteCentreY;
+        }
+        /**
+         * Sets Vignette centre Y Offset.
+         */
+        public set vignetteCentreY(value: number) {
+            this.imageProcessingConfiguration.vignetteCentreY = value;
+        }
+
+        /**
+         * Gets Vignette weight or intensity of the vignette effect.
+         */
+        public get vignetteWeight(): number {
+            return this.imageProcessingConfiguration.vignetteWeight;
+        }
+        /**
+         * Sets Vignette weight or intensity of the vignette effect.
+         */
+        public set vignetteWeight(value: number) {
+            this.imageProcessingConfiguration.vignetteWeight = value;
+        }
 
 
+        /**
+         * Gets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
+         * if vignetteEnabled is set to true.
+         */
+        public get vignetteColor(): Color4 {
+            return this.imageProcessingConfiguration.vignetteColor;
+        }
+        /**
+         * Sets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
+         * if vignetteEnabled is set to true.
+         */
+        public set vignetteColor(value: Color4) {
+            this.imageProcessingConfiguration.vignetteColor = value;
+        }
+
+        /**
+         * Gets Camera field of view used by the Vignette effect.
+         */
+        public get cameraFov(): number {
+            return this.imageProcessingConfiguration.cameraFov;
+        }
+        /**
+         * Sets Camera field of view used by the Vignette effect.
+         */
+        public set cameraFov(value: number) {
+            this.imageProcessingConfiguration.cameraFov = value;
+        }
+
+        /**
+         * Gets the vignette blend mode allowing different kind of effect.
+         */
+        public get vignetteBlendMode(): number {
+            return this.imageProcessingConfiguration.vignetteBlendMode;
+        }
+        /**
+         * Sets the vignette blend mode allowing different kind of effect.
+         */
+        public set vignetteBlendMode(value: number) {
+            this.imageProcessingConfiguration.vignetteBlendMode = value;
+        }
+
+        /**
+         * Gets whether the vignette effect is enabled.
+         */
+        public get vignetteEnabled(): boolean {
+            return this.imageProcessingConfiguration.vignetteEnabled;
+        }
+        /**
+         * Sets whether the vignette effect is enabled.
+         */
+        public set vignetteEnabled(value: boolean) {
+            this.imageProcessingConfiguration.vignetteEnabled = value;
+        }
+
+        @serialize()
+        private _fromLinearSpace = false;
+        /**
+         * Gets whether the input of the processing is in Gamma or Linear Space.
+         */
         public get fromLinearSpace(): boolean {
         public get fromLinearSpace(): boolean {
             return this._fromLinearSpace;
             return this._fromLinearSpace;
         }
         }
-
+        /**
+         * Sets whether the input of the processing is in Gamma or Linear Space.
+         */
         public set fromLinearSpace(value: boolean) {
         public set fromLinearSpace(value: boolean) {
             if (this._fromLinearSpace === value) {
             if (this._fromLinearSpace === value) {
                 return;
                 return;
@@ -84,122 +262,53 @@
 
 
             this._fromLinearSpace = value;
             this._fromLinearSpace = value;
             this._updateParameters();
             this._updateParameters();
-        }              
-
-        public get cameraToneMappingEnabled(): boolean {
-            return this._cameraToneMappingEnabled;
         }
         }
 
 
-        public set cameraToneMappingEnabled(value: boolean) {
-            if (this._cameraToneMappingEnabled === value) {
-                return;
-            }
-
-            this._cameraToneMappingEnabled = value;
-            this._updateParameters();
-        }               
+        /**
+         * Cached defines object, reused between updates to avoid triggering the GC.
+         */
+        private _defines: IImageProcessingDefines & { FROMLINEARSPACE: boolean } = {
+            IMAGEPROCESSING: false,
+            VIGNETTE: false,
+            VIGNETTEBLENDMODEMULTIPLY: false,
+            VIGNETTEBLENDMODEOPAQUE: false,
+            TONEMAPPING: false,
+            CONTRAST: false,
+            COLORCURVES: false,
+            COLORGRADING: false,
+            FROMLINEARSPACE: false,
+        }
 
 
         constructor(name: string, options: number | PostProcessOptions, camera?: Camera, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Engine.TEXTURETYPE_UNSIGNED_INT) {
         constructor(name: string, options: number | PostProcessOptions, camera?: Camera, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Engine.TEXTURETYPE_UNSIGNED_INT) {
-            super(name, "imageProcessing", [
-                                            'contrast',
-                                            'vignetteSettings1',
-                                            'vignetteSettings2',
-                                            'cameraExposureLinear',
-                                            'vCameraColorCurveNegative',
-                                            'vCameraColorCurveNeutral',
-                                            'vCameraColorCurvePositive',
-                                            'colorTransformSettings'                    
-                                            ], ["txColorTransform"], options, camera, samplingMode, engine, reusable,
+            super(name, "imageProcessing", [], [], options, camera, samplingMode, engine, reusable,
                                             null, textureType, "postprocess", null, true);
                                             null, textureType, "postprocess", null, true);
 
 
+            // Default to the scene's image processing configuration.
+            this._attachImageProcessingConfiguration(null);
+
             this._updateParameters();
             this._updateParameters();
 
 
             this.onApply = (effect: Effect) => {
             this.onApply = (effect: Effect) => {
-                let aspectRatio = this.aspectRatio;
-                
-                // Color 
-                if (this._colorCurvesEnabled) {
-                    ColorCurves.Bind(this.colorCurves, effect);
-                }
-
-                if (this._vignetteEnabled) {
-                    // Vignette
-                    let vignetteScaleY = Math.tan(this.cameraFov * 0.5);
-                    let vignetteScaleX = vignetteScaleY * aspectRatio;
-
-                    let vignetteScaleGeometricMean = Math.sqrt(vignetteScaleX * vignetteScaleY);
-                    vignetteScaleX = Tools.Mix(vignetteScaleX, vignetteScaleGeometricMean, this.vignetteStretch);
-                    vignetteScaleY = Tools.Mix(vignetteScaleY, vignetteScaleGeometricMean, this.vignetteStretch);
-
-                    effect.setFloat4('vignetteSettings1', vignetteScaleX, vignetteScaleY, -vignetteScaleX * this.vignetteCentreX, -vignetteScaleY * this.vignetteCentreY);
-
-                    let vignettePower = -2.0 * this.vignetteWeight;
-                    effect.setFloat4('vignetteSettings2', this.vignetteColor.r, this.vignetteColor.g, this.vignetteColor.b, vignettePower);
-                }
-
-                // Contrast and exposure
-                effect.setFloat('contrast', this.cameraContrast);
-                effect.setFloat('cameraExposureLinear', Math.pow(2.0, -this.cameraExposure) * Math.PI);
-                
-                // Color transform settings
-                if (this._colorGradingTexture) {
-                    effect.setTexture('txColorTransform', this.colorGradingTexture);
-                    let textureSize = this.colorGradingTexture.getSize().height;
-
-                    effect.setFloat4("colorTransformSettings",
-                        (textureSize - 1) / textureSize, // textureScale
-                        0.5 / textureSize, // textureOffset
-                        textureSize, // textureSize
-                        this.colorGradingWeight // weight
-                    );                
-                }
+                this.imageProcessingConfiguration.bind(effect, this.aspectRatio);
             };
             };
         }
         }
 
 
         protected _updateParameters(): void {
         protected _updateParameters(): void {
+            this._defines.FROMLINEARSPACE = this._fromLinearSpace;
+            this.imageProcessingConfiguration.prepareDefines(this._defines);
             var defines = "";
             var defines = "";
-            var samplers = ["textureSampler"];
-
-            if (this.colorGradingTexture) {
-                defines = "#define COLORGRADING\r\n";
-                samplers.push("txColorTransform");
-            }
-
-            if (this._vignetteEnabled) {
-                defines += "#define VIGNETTE\r\n";
-
-                if (this.vignetteBlendMode === ImageProcessingPostProcess._VIGNETTEMODE_MULTIPLY) {
-                    defines += "#define VIGNETTEBLENDMODEMULTIPLY\r\n";
-                } else {
-                    defines += "#define VIGNETTEBLENDMODEOPAQUE\r\n";
+            for (const define in this._defines) {
+                if (this._defines[define]) {
+                    defines += `#define ${define}\r\n`;
                 }
                 }
             }
             }
 
 
-            if (this.cameraToneMappingEnabled) {
-                defines += "#define TONEMAPPING\r\n";
-            }
-
-            if (this._colorCurvesEnabled && this.colorCurves) {
-                defines += "#define COLORCURVES\r\n";
-            }
-
-            if (this._fromLinearSpace) {
-                defines += "#define FROMLINEARSPACE\r\n";
-            }
-
-            this.updateEffect(defines, null, samplers);
-        }
-
-        // Statics
-        private static _VIGNETTEMODE_MULTIPLY = 0;
-        private static _VIGNETTEMODE_OPAQUE = 1;
+            var samplers = ["textureSampler"];
+            ImageProcessing.PrepareSamplers(samplers, this._defines);
 
 
-        public static get VIGNETTEMODE_MULTIPLY(): number {
-            return ImageProcessingPostProcess._VIGNETTEMODE_MULTIPLY;
-        }
+            var uniforms = [];
+            ImageProcessing.PrepareUniforms(uniforms, this._defines);
 
 
-        public static get VIGNETTEMODE_OPAQUE(): number {
-            return ImageProcessingPostProcess._VIGNETTEMODE_OPAQUE;
+            this.updateEffect(defines, uniforms, samplers);
         }
         }
     }
     }
 }
 }
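Since the post process now delegates all of its state to the shared configuration, creating it is mostly a matter of attaching it to a camera and adjusting that configuration. A short usage sketch, assuming the camera already exists (the function and values are illustrative):

// Sketch: the post process reads vignette/contrast/grading from the shared configuration.
function addImageProcessing(camera: BABYLON.Camera): BABYLON.ImageProcessingPostProcess {
    const postProcess = new BABYLON.ImageProcessingPostProcess("imageProcessing", 1.0, camera);
    postProcess.vignetteEnabled = true;   // proxies imageProcessingConfiguration.vignetteEnabled
    postProcess.vignetteWeight = 2.0;
    postProcess.fromLinearSpace = false;  // input is already in gamma space
    return postProcess;
}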

+ 31 - 1
src/Shaders/ShadersInclude/helperFunctions.fx

@@ -1,4 +1,10 @@
-mat3 transposeMat3(mat3 inMatrix) {
+const float PI = 3.1415926535897932384626433832795;
+
+const float LinearEncodePowerApprox = 2.2;
+const float GammaEncodePowerApprox = 1.0 / LinearEncodePowerApprox;
+const vec3 LuminanceEncodeApprox = vec3(0.2126, 0.7152, 0.0722);
+
+mat3 transposeMat3(mat3 inMatrix) {
 	vec3 i0 = inMatrix[0];
 	vec3 i0 = inMatrix[0];
 	vec3 i1 = inMatrix[1];
 	vec3 i1 = inMatrix[1];
 	vec3 i2 = inMatrix[2];
 	vec3 i2 = inMatrix[2];
@@ -10,4 +16,28 @@
 		);
 		);
 
 
 	return outMatrix;
 	return outMatrix;
+}
+
+vec3 applyEaseInOut(vec3 x){
+	return x * x * (3.0 - 2.0 * x);
+}
+
+vec3 toLinearSpace(vec3 color)
+{
+	return pow(color, vec3(LinearEncodePowerApprox));
+}
+
+vec3 toGammaSpace(vec3 color)
+{
+    return pow(color, vec3(GammaEncodePowerApprox));
+}
+
+float square(float value)
+{
+    return value * value;
+}
+
+float getLuminance(vec3 color)
+{
+    return clamp(dot(color, LuminanceEncodeApprox), 0., 1.);
 }
 }
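The gamma helpers use a plain 2.2 power approximation rather than the exact sRGB curve. For reference, a CPU-side TypeScript mirror of the same math (these helper names are illustrative, not engine API):

// Same approximations as the shader: pow(c, 2.2) to linear, pow(c, 1/2.2) back to gamma.
const LinearEncodePowerApprox = 2.2;

function toLinearSpace(c: number): number {
    return Math.pow(c, LinearEncodePowerApprox);
}

function toGammaSpace(c: number): number {
    return Math.pow(c, 1.0 / LinearEncodePowerApprox);
}

function getLuminance(r: number, g: number, b: number): number {
    // Rec. 709 luma weights, clamped to [0, 1] like the shader version.
    return Math.min(1.0, Math.max(0.0, 0.2126 * r + 0.7152 * g + 0.0722 * b));
}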

+ 24 - 0
src/Shaders/ShadersInclude/imageProcessingDeclaration.fx

@@ -0,0 +1,24 @@
+#ifdef CONTRAST
+	uniform float contrast;
+#endif
+
+#ifdef VIGNETTE
+	uniform vec2 vInverseScreenSize;
+	uniform vec4 vignetteSettings1;
+	uniform vec4 vignetteSettings2;
+#endif
+
+#ifdef TONEMAPPING
+	uniform float cameraExposureLinear;
+#endif
+
+#ifdef COLORCURVES
+	uniform vec4 vCameraColorCurveNegative;
+	uniform vec4 vCameraColorCurveNeutral;
+	uniform vec4 vCameraColorCurvePositive;
+#endif
+
+#ifdef COLORGRADING
+	uniform sampler2D txColorTransform;
+	uniform vec4 colorTransformSettings;
+#endif

+ 103 - 0
src/Shaders/ShadersInclude/imageProcessingFunctions.fx

@@ -0,0 +1,103 @@
+#ifdef COLORGRADING
+/** 
+ * Polyfill for SAMPLE_TEXTURE_3D, which is unsupported in WebGL.
+ * sampler3dSetting.x = textureOffset (0.5 / textureSize).
+ * sampler3dSetting.y = textureSize.
+ */
+vec3 sampleTexture3D(sampler2D colorTransform, vec3 color, vec2 sampler3dSetting)
+{
+	float sliceSize = 2.0 * sampler3dSetting.x; // Size of 1 slice relative to the texture, for example 1/8
+
+	float sliceContinuous = (color.y - sampler3dSetting.x) * sampler3dSetting.y;
+	float sliceInteger = floor(sliceContinuous);
+
+	// Note: this is mathematically equivalent to fract(sliceContinuous); but we use explicit subtract
+	// rather than separate fract() for correct results near slice boundaries (matching sliceInteger choice)
+	float sliceFraction = sliceContinuous - sliceInteger;
+
+	vec2 sliceUV = color.xz;
+	
+	sliceUV.x *= sliceSize;
+	sliceUV.x += sliceInteger * sliceSize;
+
+	vec4 slice0Color = texture2D(colorTransform, sliceUV);
+
+	sliceUV.x += sliceSize;
+	vec4 slice1Color = texture2D(colorTransform, sliceUV);
+
+	vec3 result = mix(slice0Color.rgb, slice1Color.rgb, sliceFraction);
+	color.rgb = result.bgr;
+
+	return color;
+}
+#endif
+
+vec4 applyImageProcessing(vec4 result) {
+
+#ifdef TONEMAPPING
+	result.rgb *= cameraExposureLinear;
+#endif
+
+#ifdef VIGNETTE
+		//vignette
+		vec2 viewportXY = gl_FragCoord.xy * vInverseScreenSize;
+		viewportXY = viewportXY * 2.0 - 1.0;
+		vec3 vignetteXY1 = vec3(viewportXY * vignetteSettings1.xy + vignetteSettings1.zw, 1.0);
+		float vignetteTerm = dot(vignetteXY1, vignetteXY1);
+		float vignette = pow(vignetteTerm, vignetteSettings2.w);
+
+		// Interpolate between the artist 'color' and white based on the physical transmission value 'vignette'.
+		vec3 vignetteColor = vignetteSettings2.rgb;
+
+	#ifdef VIGNETTEBLENDMODEMULTIPLY
+		vec3 vignetteColorMultiplier = mix(vignetteColor, vec3(1, 1, 1), vignette);
+		result.rgb *= vignetteColorMultiplier;
+	#endif
+
+	#ifdef VIGNETTEBLENDMODEOPAQUE
+		result.rgb = mix(vignetteColor, result.rgb, vignette);
+	#endif
+#endif
+	
+#ifdef TONEMAPPING
+	float tonemappingCalibration = 1.590579;
+	result.rgb = 1.0 - exp2(-tonemappingCalibration * result.rgb);
+#endif
+
+	// Going back to gamma space
+	result.rgb = toGammaSpace(result.rgb);
+	result.rgb = clamp(result.rgb, 0.0, 1.0);
+
+#ifdef CONTRAST
+	// Contrast
+	vec3 resultHighContrast = applyEaseInOut(result.rgb);
+
+	if (contrast < 1.0) {
+		// Decrease contrast: interpolate towards zero-contrast image (flat grey)
+		result.rgb = mix(vec3(0.5, 0.5, 0.5), result.rgb, contrast);
+	} else {
+		// Increase contrast: apply simple shoulder-toe high contrast curve
+		result.rgb = mix(result.rgb, resultHighContrast, contrast - 1.0);
+	}
+#endif
+
+	// Apply Color Transform
+#ifdef COLORGRADING
+	vec3 colorTransformInput = result.rgb * colorTransformSettings.xxx + colorTransformSettings.yyy;
+	vec3 colorTransformOutput = sampleTexture3D(txColorTransform, colorTransformInput, colorTransformSettings.yz).rgb;
+
+	result.rgb = mix(result.rgb, colorTransformOutput, colorTransformSettings.www);
+#endif
+
+#ifdef COLORCURVES
+	// Apply Color Curves
+	float luma = getLuminance(result.rgb);
+	vec2 curveMix = clamp(vec2(luma * 3.0 - 1.5, luma * -3.0 + 1.5), vec2(0.0), vec2(1.0));
+	vec4 colorCurve = vCameraColorCurveNeutral + curveMix.x * vCameraColorCurvePositive - curveMix.y * vCameraColorCurveNegative;
+
+	result.rgb *= colorCurve.rgb;
+	result.rgb = mix(vec3(luma), result.rgb, colorCurve.a);
+#endif
+
+	return result;
+}
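For reference, the slice arithmetic in sampleTexture3D can be mirrored on the CPU. The TypeScript sketch below is illustrative only (not part of the patch) and assumes the layout described in the comment above: N slices of an N by N LUT packed side by side, with the settings pair packed as (0.5 / size, size).

    // Illustrative sketch: which two slices the shader blends for a given green value,
    // mirroring sampleTexture3D above. `size` is the hypothetical number of slices.
    function lutSliceLookup(green: number, size: number): { slice0: number; slice1: number; fraction: number } {
        const offset = 0.5 / size;                       // sampler3dSetting.x
        const sliceContinuous = (green - offset) * size; // sampler3dSetting.y
        const slice0 = Math.floor(sliceContinuous);
        // Explicit subtract instead of a separate fract(), matching the shader.
        const fraction = sliceContinuous - slice0;
        return { slice0, slice1: slice0 + 1, fraction };
    }

    // Example: a 16-slice LUT with green = 0.4 blends slices 5 and 6 at fraction 0.9.
    console.log(lutSliceLookup(0.4, 16));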

+ 0 - 9
src/Shaders/ShadersInclude/pbrFragmentDeclaration.fx

@@ -44,15 +44,6 @@ uniform vec3 vReflectivityInfos;
 uniform vec2 vMicroSurfaceSamplerInfos;
 uniform vec2 vMicroSurfaceSamplerInfos;
 #endif
 #endif
 
 
-#ifdef OPACITYFRESNEL
-uniform vec4 opacityParts;
-#endif
-
-#ifdef EMISSIVEFRESNEL
-uniform vec4 emissiveLeftColor;
-uniform vec4 emissiveRightColor;
-#endif
-
 // Refraction Reflection
 // Refraction Reflection
 #if defined(REFLECTIONMAP_SPHERICAL) || defined(REFLECTIONMAP_PROJECTION) || defined(REFRACTION)
 #if defined(REFLECTIONMAP_SPHERICAL) || defined(REFLECTIONMAP_PROJECTION) || defined(REFRACTION)
 uniform mat4 view;
 uniform mat4 view;

+ 11 - 64
src/Shaders/ShadersInclude/pbrFunctions.fx

@@ -3,25 +3,14 @@
 #define FRESNEL_MAXIMUM_ON_ROUGH 0.25
 #define FRESNEL_MAXIMUM_ON_ROUGH 0.25
 
 
 // PBR CUSTOM CONSTANTS
 // PBR CUSTOM CONSTANTS
-const float kPi = 3.1415926535897932384626433832795;
 const float kRougnhessToAlphaScale = 0.1;
 const float kRougnhessToAlphaScale = 0.1;
 const float kRougnhessToAlphaOffset = 0.29248125;
 const float kRougnhessToAlphaOffset = 0.29248125;
 
 
-float Square(float value)
-{
-    return value * value;
-}
-
-float getLuminance(vec3 color)
-{
-    return clamp(dot(color, vec3(0.2126, 0.7152, 0.0722)), 0., 1.);
-}
-
 float convertRoughnessToAverageSlope(float roughness)
 float convertRoughnessToAverageSlope(float roughness)
 {
 {
     // Calculate AlphaG as square of roughness; add epsilon to avoid numerical issues
     // Calculate AlphaG as square of roughness; add epsilon to avoid numerical issues
     const float kMinimumVariance = 0.0005;
     const float kMinimumVariance = 0.0005;
-    float alphaG = Square(roughness) + kMinimumVariance;
+    float alphaG = square(roughness) + kMinimumVariance;
     return alphaG;
     return alphaG;
 }
 }
 
 
@@ -68,9 +57,9 @@ float normalDistributionFunction_TrowbridgeReitzGGX(float NdotH, float alphaG)
     // Note: alphaG is average slope (gradient) of the normals in slope-space.
     // Note: alphaG is average slope (gradient) of the normals in slope-space.
     // It is also the (trigonometric) tangent of the median distribution value, i.e. 50% of normals have
     // It is also the (trigonometric) tangent of the median distribution value, i.e. 50% of normals have
     // a tangent (gradient) closer to the macrosurface than this slope.
     // a tangent (gradient) closer to the macrosurface than this slope.
-    float a2 = Square(alphaG);
+    float a2 = square(alphaG);
     float d = NdotH * NdotH * (a2 - 1.0) + 1.0;
     float d = NdotH * NdotH * (a2 - 1.0) + 1.0;
-    return a2 / (kPi * d * d);
+    return a2 / (PI * d * d);
 }
 }
 
 
 vec3 fresnelSchlickGGX(float VdotH, vec3 reflectance0, vec3 reflectance90)
 vec3 fresnelSchlickGGX(float VdotH, vec3 reflectance0, vec3 reflectance90)
@@ -78,7 +67,7 @@ vec3 fresnelSchlickGGX(float VdotH, vec3 reflectance0, vec3 reflectance90)
     return reflectance0 + (reflectance90 - reflectance0) * pow(clamp(1.0 - VdotH, 0., 1.), 5.0);
     return reflectance0 + (reflectance90 - reflectance0) * pow(clamp(1.0 - VdotH, 0., 1.), 5.0);
 }
 }
 
 
-vec3 FresnelSchlickEnvironmentGGX(float VdotN, vec3 reflectance0, vec3 reflectance90, float smoothness)
+vec3 fresnelSchlickEnvironmentGGX(float VdotN, vec3 reflectance0, vec3 reflectance90, float smoothness)
 {
 {
     // Schlick fresnel approximation, extended with basic smoothness term so that rough surfaces do not approach reflectance90 at grazing angle
     // Schlick fresnel approximation, extended with basic smoothness term so that rough surfaces do not approach reflectance90 at grazing angle
     float weight = mix(FRESNEL_MAXIMUM_ON_ROUGH, 1.0, smoothness);
     float weight = mix(FRESNEL_MAXIMUM_ON_ROUGH, 1.0, smoothness);
@@ -109,7 +98,7 @@ float computeDiffuseTerm(float NdotL, float NdotV, float VdotH, float roughness)
         (1.0 + (diffuseFresnel90 - 1.0) * diffuseFresnelNL) *
         (1.0 + (diffuseFresnel90 - 1.0) * diffuseFresnelNL) *
         (1.0 + (diffuseFresnel90 - 1.0) * diffuseFresnelNV);
         (1.0 + (diffuseFresnel90 - 1.0) * diffuseFresnelNV);
 
 
-    return fresnel * NdotL / kPi;
+    return fresnel * NdotL / PI;
 }
 }
 
 
 float adjustRoughnessFromLightProperties(float roughness, float lightRadius, float lightDistance)
 float adjustRoughnessFromLightProperties(float roughness, float lightRadius, float lightDistance)
@@ -136,51 +125,9 @@ float computeDefaultMicroSurface(float microSurface, vec3 reflectivityColor)
     return microSurface;
     return microSurface;
 }
 }
 
 
-vec3 toLinearSpace(vec3 color)
-{
-    return vec3(pow(color.r, 2.2), pow(color.g, 2.2), pow(color.b, 2.2));
-}
-
-vec3 toGammaSpace(vec3 color)
-{
-    return vec3(pow(color.r, 1.0 / 2.2), pow(color.g, 1.0 / 2.2), pow(color.b, 1.0 / 2.2));
-}
-
-#ifdef CAMERATONEMAP
-    vec3 toneMaps(vec3 color)
-    {
-        color = max(color, 0.0);
-
-        // TONE MAPPING / EXPOSURE
-        color.rgb = color.rgb * vCameraInfos.x;
-
-        float tuning = 1.5; // TODO: sync up so e.g. 18% greys are matched to exposure appropriately
-        // PI Test
-        // tuning *=  kPi;
-        vec3 tonemapped = 1.0 - exp2(-color.rgb * tuning); // simple local photographic tonemapper
-        color.rgb = mix(color.rgb, tonemapped, 1.0);
-        return color;
-    }
-#endif
-
-#ifdef CAMERACONTRAST
-    vec4 contrasts(vec4 color)
-    {
-        color = clamp(color, 0.0, 1.0);
-
-        vec3 resultHighContrast = color.rgb * color.rgb * (3.0 - 2.0 * color.rgb);
-        float contrast = vCameraInfos.y;
-        if (contrast < 1.0)
-        {
-            // Decrease contrast: interpolate towards zero-contrast image (flat grey)
-            color.rgb = mix(vec3(0.5, 0.5, 0.5), color.rgb, contrast);
-        }
-        else
-        {
-            // Increase contrast: apply simple shoulder-toe high contrast curve
-            color.rgb = mix(color.rgb, resultHighContrast, contrast - 1.0);
-        }
-
-        return color;
-    }
-#endif
+// For the typical incident reflectance range (between 4% and 100%), set the grazing reflectance to 100% for a typical Fresnel effect.
+// For very low reflectance ranges on highly diffuse objects (below 4%), incrementally reduce the grazing reflectance to 0%.
+float fresnelGrazingReflectance(float reflectance0) {
+	float reflectance90 = clamp(reflectance0 * 25.0, 0.0, 1.0);
+	return reflectance90;
+}
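As a quick sanity check on the new helper, the mapping can be reproduced outside the shader; the TypeScript sketch below is illustrative only and mirrors the GLSL line for line.

    // Illustrative sketch: a 4% base reflectance (0.04) already saturates the grazing
    // reflectance to 1.0, while values below 4% scale linearly toward 0.
    function fresnelGrazingReflectance(reflectance0: number): number {
        return Math.min(Math.max(reflectance0 * 25.0, 0.0), 1.0);
    }

    console.log(fresnelGrazingReflectance(0.04)); // 1.0
    console.log(fresnelGrazingReflectance(0.01)); // 0.25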

+ 0 - 3
src/Shaders/ShadersInclude/pbrUboDeclaration.fx

@@ -30,9 +30,6 @@ uniform Material
 	uniform vec2 vMicrosurfaceTextureLods;
 	uniform vec2 vMicrosurfaceTextureLods;
 	uniform vec4 vReflectivityColor;
 	uniform vec4 vReflectivityColor;
 	uniform vec3 vEmissiveColor;
 	uniform vec3 vEmissiveColor;
-	uniform vec4 opacityParts;
-	uniform vec4 emissiveLeftColor;
-	uniform vec4 emissiveRightColor;
 
 
 	uniform float pointSize;
 	uniform float pointSize;
 };
 };

+ 9 - 13
src/Shaders/default.fragment.fx

@@ -99,15 +99,11 @@ varying vec3 vDirectionW;
 
 
 #endif
 #endif
 
 
-#ifdef CAMERACOLORGRADING
-	#include<colorGradingDefinition>	
-	#include<colorGrading>
-#endif
+#include<imageProcessingDeclaration>
 
 
-#ifdef CAMERACOLORCURVES
-	#include<colorCurvesDefinition>
-	#include<colorCurves>
-#endif
+#include<helperFunctions>
+
+#include<imageProcessingFunctions>
 
 
 #include<bumpFragmentFunctions>
 #include<bumpFragmentFunctions>
 #include<clipPlaneFragmentDeclaration>
 #include<clipPlaneFragmentDeclaration>
@@ -361,12 +357,12 @@ void main(void) {
 #include<logDepthFragment>
 #include<logDepthFragment>
 #include<fogFragment>
 #include<fogFragment>
 
 
-#ifdef CAMERACOLORGRADING
-	color = colorGrades(color);
+// Apply image processing if relevant. As it runs in linear space,
+// we first move from gamma to linear.
+#ifdef IMAGEPROCESSING
+	color.rgb = toLinearSpace(color.rgb);
+	color = applyImageProcessing(color);
 #endif
 #endif
 
 
-#ifdef CAMERACOLORCURVES
-	color.rgb = applyColorCurves(color.rgb);
-#endif
 	gl_FragColor = color;
 	gl_FragColor = color;
 }
 }
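The new IMAGEPROCESSING path converts the shaded color from gamma to linear before applyImageProcessing, which converts back to gamma at the end. A minimal TypeScript sketch of that round trip, using the same 2.2 power approximation as the shader helpers, is shown below (illustrative only).

    // Illustrative sketch: gamma <-> linear conversions with the 2.2 approximation.
    const toLinearSpace = (c: number): number => Math.pow(c, 2.2);
    const toGammaSpace = (c: number): number => Math.pow(c, 1.0 / 2.2);

    // The round trip is (approximately) the identity: 0.5 -> ~0.2176 -> ~0.5.
    console.log(toGammaSpace(toLinearSpace(0.5)));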

+ 14 - 115
src/Shaders/imageProcessing.fragment.fx

@@ -2,130 +2,29 @@
 varying vec2 vUV;
 varying vec2 vUV;
 uniform sampler2D textureSampler;
 uniform sampler2D textureSampler;
 
 
-const float GammaEncodePowerApprox = 1.0 / 2.2;
-const float LinearEncodePowerApprox = 2.2;
-const vec3 RGBLuminanceCoefficients = vec3(0.2126, 0.7152, 0.0722);
+#include<imageProcessingDeclaration>
 
 
-uniform float contrast;
-uniform vec4 vignetteSettings1;
-uniform vec4 vignetteSettings2;
-uniform float cameraExposureLinear;
-uniform vec4 vCameraColorCurveNegative;
-uniform vec4 vCameraColorCurveNeutral;
-uniform vec4 vCameraColorCurvePositive;
-uniform sampler2D txColorTransform;
-uniform vec4 colorTransformSettings;
+#include<helperFunctions>
 
 
-vec3 applyEaseInOut(vec3 x){
-	return x * x * (3.0 - 2.0 * x);
-}
+#include<imageProcessingFunctions>
 
 
-/** 
- * Polyfill for SAMPLE_TEXTURE_3D, which is unsupported in WebGL.
- * colorTransformSettings.y = textureOffset (0.5 / textureSize).
- * colorTransformSettings.z = textureSize.
- */
-vec3 sampleTexture3D(sampler2D colorTransform, vec3 color)
+void main(void)
 {
 {
-	float sliceSize = 2.0 * colorTransformSettings.y; // Size of 1 slice relative to the texture, for example 1/8
-
-	float sliceContinuous = (color.y - colorTransformSettings.y) * colorTransformSettings.z;
-	float sliceInteger = floor(sliceContinuous);
-
-	// Note: this is mathematically equivalent to fract(sliceContinuous); but we use explicit subtract
-	// rather than separate fract() for correct results near slice boundaries (matching sliceInteger choice)
-	float sliceFraction = sliceContinuous - sliceInteger;
-
-	vec2 sliceUV = color.xz;
-	
-	sliceUV.x *= sliceSize;
-	sliceUV.x += sliceInteger * sliceSize;
-
-	vec4 slice0Color = texture2D(colorTransform, sliceUV);
-
-	sliceUV.x += sliceSize;
-	vec4 slice1Color = texture2D(colorTransform, sliceUV);
-
-	vec3 result = mix(slice0Color.rgb, slice1Color.rgb, sliceFraction);
-	color.rgb = result.bgr;
-
-	return color;
-}
-
-vec4 applyImageProcessing(vec4 result, vec2 viewportXY){
-
-#ifndef FROMLINEARSPACE
-	// Need to move to linear space for subsequent operations
-	result.rgb = pow(result.rgb, vec3(LinearEncodePowerApprox));
-#endif
-
-	result.rgb *= cameraExposureLinear;
-
-#ifdef VIGNETTE
-		//vignette
-		vec3 vignetteXY1 = vec3(viewportXY * vignetteSettings1.xy + vignetteSettings1.zw, 1.0);
-		float vignetteTerm = dot(vignetteXY1, vignetteXY1);
-		float vignette = pow(vignetteTerm, vignetteSettings2.w);
-
-		// Interpolate between the artist 'color' and white based on the physical transmission value 'vignette'.
-		vec3 vignetteColor = vignetteSettings2.rgb;
+	vec4 result = texture2D(textureSampler, vUV);
 
 
-	#ifdef VIGNETTEBLENDMODEMULTIPLY
-		vec3 vignetteColorMultiplier = mix(vignetteColor, vec3(1, 1, 1), vignette);
-		result.rgb *= vignetteColorMultiplier;
+#ifdef IMAGEPROCESSING
+	#ifndef FROMLINEARSPACE
+		// Need to move to linear space for subsequent operations.
+		result.rgb = toLinearSpace(result.rgb);
 	#endif
 	#endif
 
 
-	#ifdef VIGNETTEBLENDMODEOPAQUE
-		result.rgb = mix(vignetteColor, result.rgb, vignette);
+	result = applyImageProcessing(result);
+#else
+	// If the input is in linear space, we at least need to convert it back to gamma space.
+	#ifdef FROMLINEARSPACE
+		result = applyImageProcessing(result);
 	#endif
 	#endif
-
 #endif
 #endif
-	
-#ifdef TONEMAPPING	
-	float tonemappingCalibration = 1.590579;
-	result.rgb = 1.0 - exp2(-tonemappingCalibration * result.rgb);
-#endif
-
-	// Going back to gamma space
-	result.rgb = pow(result.rgb, vec3(GammaEncodePowerApprox));
-	result.rgb = clamp(result.rgb, 0.0, 1.0);
-
-	// Contrast
-	vec3 resultHighContrast = applyEaseInOut(result.rgb);
-
-	if (contrast < 1.0) {
-		result.rgb = mix(vec3(0.5, 0.5, 0.5), result.rgb, contrast);
-	} else {
-		result.rgb = mix(result.rgb, resultHighContrast, contrast - 1.0);
-	}
-
-	// Apply Color Transform
-#ifdef COLORGRADING
-	vec3 colorTransformInput = result.rgb * colorTransformSettings.xxx + colorTransformSettings.yyy;
-	vec3 colorTransformOutput = sampleTexture3D(txColorTransform, colorTransformInput).rgb;
-
-	result.rgb = mix(result.rgb, colorTransformOutput, colorTransformSettings.www);
-#endif
-
-#ifdef COLORCURVES
-	// Apply Color Curves
-	float luma = dot(result.rgb, RGBLuminanceCoefficients);
-	vec2 curveMix = clamp(vec2(luma * 3.0 - 1.5, luma * -3.0 + 1.5), vec2(0.0), vec2(1.0));
-	vec4 colorCurve = vCameraColorCurveNeutral + curveMix.x * vCameraColorCurvePositive - curveMix.y * vCameraColorCurveNegative;
-
-	result.rgb *= colorCurve.rgb;
-	result.rgb = mix(vec3(luma), result.rgb, colorCurve.a);
-#endif
-
-	return result;
-}
-
-void main(void) 
-{
-	vec4 result = texture2D(textureSampler, vUV);
-
-	vec2 viewportXY = vUV * 2.0 - 1.0;
-	result = applyImageProcessing(result, viewportXY);
 
 
 	gl_FragColor = result;
 	gl_FragColor = result;
 }
 }
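The rewritten main() handles three define combinations. The small TypeScript sketch below (illustrative only) summarizes them; the behaviour labels are descriptive, not identifiers from the patch.

    // Illustrative sketch: what the post process does for each define combination.
    function imageProcessingBehaviour(imageProcessingDefined: boolean, fromLinearSpaceDefined: boolean): string {
        if (imageProcessingDefined) {
            // Converts gamma input to linear first, then runs the full chain.
            return "full image processing";
        }
        // Without IMAGEPROCESSING, a linear input still needs the gamma conversion at the end of applyImageProcessing.
        return fromLinearSpaceDefined ? "convert back to gamma only" : "passthrough";
    }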

+ 175 - 168
src/Shaders/pbr.fragment.fx

@@ -68,13 +68,9 @@ varying vec2 vMicroSurfaceSamplerUV;
 uniform sampler2D microSurfaceSampler;
 uniform sampler2D microSurfaceSampler;
 #endif
 #endif
 
 
-// Fresnel
-#include<fresnelFunction>
-
 // Refraction
 // Refraction
 #ifdef REFRACTION
 #ifdef REFRACTION
 
 
-
 #ifdef REFRACTIONMAP_3D
 #ifdef REFRACTIONMAP_3D
 uniform samplerCube refractionCubeSampler;
 uniform samplerCube refractionCubeSampler;
 #else
 #else
@@ -84,50 +80,41 @@ uniform sampler2D refraction2DSampler;
 
 
 // Reflection
 // Reflection
 #ifdef REFLECTION
 #ifdef REFLECTION
+	#ifdef REFLECTIONMAP_3D
+		uniform samplerCube reflectionCubeSampler;
+	#else
+		uniform sampler2D reflection2DSampler;
+	#endif
 
 
-#ifdef REFLECTIONMAP_3D
-uniform samplerCube reflectionCubeSampler;
-#else
-uniform sampler2D reflection2DSampler;
-#endif
+	#ifdef REFLECTIONMAP_SKYBOX
+		varying vec3 vPositionUVW;
+	#else
+		#if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED)
+			varying vec3 vDirectionW;
+		#endif
 
 
-#ifdef REFLECTIONMAP_SKYBOX
-varying vec3 vPositionUVW;
-#else
-#if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED)
-varying vec3 vDirectionW;
-#endif
+	#endif
 
 
+	#include<reflectionFunction>
 #endif
 #endif
 
 
-#include<reflectionFunction>
-
+// Forces linear space for image processing
+#ifndef FROMLINEARSPACE
+	#define FROMLINEARSPACE
 #endif
 #endif
 
 
-#ifdef CAMERACOLORGRADING
-	#include<colorGradingDefinition>
-#endif
+#include<imageProcessingDeclaration>
 
 
-#ifdef CAMERACOLORCURVES
-	#include<colorCurvesDefinition>
-#endif
+#include<helperFunctions>
+
+#include<imageProcessingFunctions>
 
 
 // PBR
 // PBR
 #include<shadowsFragmentFunctions>
 #include<shadowsFragmentFunctions>
 #include<pbrFunctions>
 #include<pbrFunctions>
-
-#ifdef CAMERACOLORGRADING
-	#include<colorGrading>
-#endif
-
-#ifdef CAMERACOLORCURVES
-	#include<colorCurves>
-#endif
-
 #include<harmonicsFunctions>
 #include<harmonicsFunctions>
 #include<pbrLightFunctions>
 #include<pbrLightFunctions>
 
 
-#include<helperFunctions>
 #include<bumpFragmentFunctions>
 #include<bumpFragmentFunctions>
 #include<clipPlaneFragmentDeclaration>
 #include<clipPlaneFragmentDeclaration>
 #include<logDepthDeclaration>
 #include<logDepthDeclaration>
@@ -138,9 +125,10 @@ varying vec3 vDirectionW;
 void main(void) {
 void main(void) {
 #include<clipPlaneFragment>
 #include<clipPlaneFragment>
 
 
+// _______________________________________________________________________________
+// _____________________________ Geometry Information ____________________________
 	vec3 viewDirectionW = normalize(vEyePosition - vPositionW);
 	vec3 viewDirectionW = normalize(vEyePosition - vPositionW);
 
 
-	// Bump
 #ifdef NORMAL
 #ifdef NORMAL
 	vec3 normalW = normalize(vNormalW);
 	vec3 normalW = normalize(vNormalW);
 #else
 #else
@@ -153,6 +141,7 @@ void main(void) {
 	normalW = gl_FrontFacing ? normalW : -normalW;
 	normalW = gl_FrontFacing ? normalW : -normalW;
 #endif
 #endif
 
 
+// _____________________________ Albedo Information ______________________________
 	// Albedo
 	// Albedo
 	vec3 surfaceAlbedo = vAlbedoColor.rgb;
 	vec3 surfaceAlbedo = vAlbedoColor.rgb;
 
 
@@ -169,15 +158,32 @@ void main(void) {
 	surfaceAlbedo *= vAlbedoInfos.y;
 	surfaceAlbedo *= vAlbedoInfos.y;
 #endif
 #endif
 
 
-#ifndef LINKREFRACTIONTOTRANSPARENCY
-	#if defined(ALPHATEST) && defined(ALPHATESTVALUE)
+// _____________________________ Alpha Information _______________________________
+#ifdef OPACITY
+	vec4 opacityMap = texture2D(opacitySampler, vOpacityUV + uvOffset);
+
+	#ifdef OPACITYRGB
+		alpha = getLuminance(opacityMap.rgb);
+	#else
+		alpha *= opacityMap.a;
+	#endif
+
+	alpha *= vOpacityInfos.y;
+#endif
+
+#ifdef VERTEXALPHA
+	alpha *= vColor.a;
+#endif
+
+#if !defined(LINKREFRACTIONTOTRANSPARENCY) && !defined(ALPHAFRESNEL)
+	#ifdef ALPHATEST
 		if (alpha <= ALPHATESTVALUE)
 		if (alpha <= ALPHATESTVALUE)
 			discard;
 			discard;
-			
-			#ifndef ALPHABLEND
-				// Prevent to blend with the canvas.
-				alpha = 1.0;
-			#endif
+
+		#ifndef ALPHABLEND
+			// Prevent blending with the canvas.
+			alpha = 1.0;
+		#endif
 	#endif
 	#endif
 #endif
 #endif
 
 
@@ -185,7 +191,7 @@ void main(void) {
 	surfaceAlbedo *= vColor.rgb;
 	surfaceAlbedo *= vColor.rgb;
 #endif
 #endif
 
 
-	// Ambient color
+// _____________________________ AO    Information _______________________________
 	vec3 ambientOcclusionColor = vec3(1., 1., 1.);
 	vec3 ambientOcclusionColor = vec3(1., 1., 1.);
 
 
 #ifdef AMBIENT
 #ifdef AMBIENT
@@ -196,7 +202,7 @@ void main(void) {
 	ambientOcclusionColor = mix(ambientOcclusionColor, ambientOcclusionColorMap, vAmbientInfos.z);
 	ambientOcclusionColor = mix(ambientOcclusionColor, ambientOcclusionColorMap, vAmbientInfos.z);
 #endif
 #endif
 
 
-	// Reflectivity map
+// _____________________________ Reflectivity Info _______________________________
 	float microSurface = vReflectivityColor.a;
 	float microSurface = vReflectivityColor.a;
 	vec3 surfaceReflectivityColor = vReflectivityColor.rgb;
 	vec3 surfaceReflectivityColor = vReflectivityColor.rgb;
 
 
@@ -268,78 +274,44 @@ void main(void) {
 	#endif
 	#endif
 #endif
 #endif
 
 
-	// Compute N dot V.
-	float NdotV = clamp(dot(normalW, viewDirectionW),0., 1.) + 0.00001;
-
 	// Adapt microSurface.
 	// Adapt microSurface.
 	microSurface = clamp(microSurface, 0., 1.);
 	microSurface = clamp(microSurface, 0., 1.);
-
 	// Compute roughness.
 	// Compute roughness.
 	float roughness = 1. - microSurface;
 	float roughness = 1. - microSurface;
 
 
-	#ifdef LIGHTMAP
-  		vec3 lightmapColor = texture2D(lightmapSampler, vLightmapUV + uvOffset).rgb * vLightmapInfos.y;
-  	#endif
-
-	float NdotL = -1.;
+// _____________________________ Alpha Fresnel ___________________________________
+#ifdef ALPHAFRESNEL
+	// Convert approximate perceptual opacity (gamma-encoded opacity) to linear opacity (absorptance, or inverse transmission)
+	// for use with the linear HDR render target. The final composition will be converted back to gamma encoded values for eventual display.
+	// Uses power 2.0 rather than 2.2 for simplicity/efficiency, and because the mapping does not need to match the gamma applied to RGB.
+	float opacityPerceptual = alpha;
+	float opacity0 = opacityPerceptual * opacityPerceptual;
+	float opacity90 = fresnelGrazingReflectance(opacity0);
 
 
-	// Compute reflectance.
-	float reflectance = max(max(surfaceReflectivityColor.r, surfaceReflectivityColor.g), surfaceReflectivityColor.b);
+	vec3 normalForward = faceforward(normalW, -viewDirectionW, normalW);
 
 
-	// For typical incident reflectance range (between 4% to 100%) set the grazing reflectance to 100% for typical fresnel effect.
-    // For very low reflectance range on highly diffuse objects (below 4%), incrementally reduce grazing reflecance to 0%.
-    float reflectance90 = clamp(reflectance * 25.0, 0.0, 1.0);
-	vec3 specularEnvironmentR0 = surfaceReflectivityColor.rgb;
-	vec3 specularEnvironmentR90 = vec3(1.0, 1.0, 1.0) * reflectance90;
-
-	// Lighting
-	vec3 diffuseBase = vec3(0., 0., 0.);
-
-#ifdef SPECULARTERM
-	vec3 specularBase = vec3(0., 0., 0.);
-#endif
+	// Calculate the appropriate linear opacity for the current viewing angle (formally, this quantity is the "directional absorptance").
+	alpha = fresnelSchlickEnvironmentGGX(clamp(dot(viewDirectionW, normalForward), 0.0, 1.0), vec3(opacity0), vec3(opacity90), sqrt(microSurface)).x;
 	
 	
-	lightingInfo info;
-	float shadow = 1.; // 1 - shadowLevel
-
-#include<lightFragment>[0..maxSimultaneousLights]
-
-	vec3 lightDiffuseContribution = diffuseBase;
-
-#ifdef SPECULARTERM
-	vec3 lightSpecularContribution = specularBase * vLightingIntensity.w;
-#endif
-
-#ifdef OPACITY
-	vec4 opacityMap = texture2D(opacitySampler, vOpacityUV + uvOffset);
+	#ifdef ALPHATEST
+		if (alpha <= ALPHATESTVALUE)
+			discard;
 
 
-	#ifdef OPACITYRGB
-		opacityMap.rgb = opacityMap.rgb * vec3(0.3, 0.59, 0.11);
-		alpha *= (opacityMap.x + opacityMap.y + opacityMap.z)* vOpacityInfos.y;
-	#else
-		alpha *= opacityMap.a * vOpacityInfos.y;
+		#ifndef ALPHABLEND
+			// Prevent blending with the canvas.
+			alpha = 1.0;
+		#endif
 	#endif
 	#endif
 #endif
 #endif
 
 
-#ifdef VERTEXALPHA
-	alpha *= vColor.a;
-#endif
-
-#ifdef OPACITYFRESNEL
-	float opacityFresnelTerm = computeFresnelTerm(viewDirectionW, normalW, opacityParts.z, opacityParts.w);
-
-	alpha += opacityParts.x * (1.0 - opacityFresnelTerm) + opacityFresnelTerm * opacityParts.y;
-#endif
-
-	// Refraction
-	vec3 surfaceRefractionColor = vec3(0., 0., 0.);
-
-	// Go mat -> blurry reflexion according to microSurface
+// _____________________________ Compute LODs Fetch ____________________________________
 #ifdef LODBASEDMICROSFURACE
 #ifdef LODBASEDMICROSFURACE
 	float alphaG = convertRoughnessToAverageSlope(roughness);
 	float alphaG = convertRoughnessToAverageSlope(roughness);
 #endif
 #endif
 
 
+// _____________________________ Refraction Info _______________________________________
 #ifdef REFRACTION
 #ifdef REFRACTION
+	vec3 surfaceRefractionColor = vec3(0., 0., 0.);
 	vec3 refractionVector = refract(-viewDirectionW, normalW, vRefractionInfos.y);
 	vec3 refractionVector = refract(-viewDirectionW, normalW, vRefractionInfos.y);
 
 
 	#ifdef LODBASEDMICROSFURACE
 	#ifdef LODBASEDMICROSFURACE
@@ -397,11 +369,10 @@ void main(void) {
 	#endif
 	#endif
 #endif
 #endif
 
 
-	// Reflection
+// _____________________________ Reflection Info _______________________________________
+#ifdef REFLECTION
 	vec3 environmentRadiance = vReflectionColor.rgb;
 	vec3 environmentRadiance = vReflectionColor.rgb;
 	vec3 environmentIrradiance = vReflectionColor.rgb;
 	vec3 environmentIrradiance = vReflectionColor.rgb;
-
-#ifdef REFLECTION
 	vec3 vReflectionUVW = computeReflectionCoords(vec4(vPositionW, 1.0), normalW);
 	vec3 vReflectionUVW = computeReflectionCoords(vec4(vPositionW, 1.0), normalW);
 
 
 	#ifdef LODBASEDMICROSFURACE
 	#ifdef LODBASEDMICROSFURACE
@@ -468,16 +439,39 @@ void main(void) {
 	#endif
 	#endif
 #endif
 #endif
 
 
-	environmentRadiance *= vLightingIntensity.z;
-	environmentIrradiance *= vLightingIntensity.z;
+// ____________________________________________________________________________________
+// _____________________________ Direct Lighting Param ________________________________
+	// Compute N dot V.
+	float NdotV = clamp(dot(normalW, viewDirectionW),0., 1.) + 0.00001;
 
 
-	// Specular Environment Fresnel.
-	vec3 specularEnvironmentReflectance = FresnelSchlickEnvironmentGGX(clamp(NdotV, 0., 1.), specularEnvironmentR0, specularEnvironmentR90, sqrt(microSurface));
+	// Compute reflectance.
+	float reflectance = max(max(surfaceReflectivityColor.r, surfaceReflectivityColor.g), surfaceReflectivityColor.b);
+	float reflectance90 = fresnelGrazingReflectance(reflectance);
+	vec3 specularEnvironmentR0 = surfaceReflectivityColor.rgb;
+	vec3 specularEnvironmentR90 = vec3(1.0, 1.0, 1.0) * reflectance90;
 
 
-	// Compute refractance
-	vec3 refractance = vec3(0.0, 0.0, 0.0);
+	// Environment Reflectance
+	vec3 specularEnvironmentReflectance = fresnelSchlickEnvironmentGGX(clamp(NdotV, 0., 1.), specularEnvironmentR0, specularEnvironmentR90, sqrt(microSurface));
+
+// _____________________________ Direct Lighting Info __________________________________
+	vec3 diffuseBase = vec3(0., 0., 0.);
+#ifdef SPECULARTERM
+	vec3 specularBase = vec3(0., 0., 0.);
+#endif
+
+#ifdef LIGHTMAP
+	vec3 lightmapColor = texture2D(lightmapSampler, vLightmapUV + uvOffset).rgb * vLightmapInfos.y;
+#endif
+
+	lightingInfo info;
+	float shadow = 1.; // 1 - shadowLevel
+	float NdotL = -1.;
+
+#include<lightFragment>[0..maxSimultaneousLights]
 
 
+// _____________________________ Refractance+Tint ________________________________
 #ifdef REFRACTION
 #ifdef REFRACTION
+	vec3 refractance = vec3(0.0, 0.0, 0.0);
 	vec3 transmission = vec3(1.0, 1.0, 1.0);
 	vec3 transmission = vec3(1.0, 1.0, 1.0);
 	#ifdef LINKREFRACTIONTOTRANSPARENCY
 	#ifdef LINKREFRACTIONTOTRANSPARENCY
 		// Transmission based on alpha.
 		// Transmission based on alpha.
@@ -510,66 +504,90 @@ void main(void) {
 	transmission *= 1.0 - specularEnvironmentReflectance;
 	transmission *= 1.0 - specularEnvironmentReflectance;
 
 
 	// Should baked in diffuse.
 	// Should baked in diffuse.
-	refractance = surfaceRefractionColor * transmission;
+	refractance = transmission;
 #endif
 #endif
 
 
-	// Apply Energy Conservation taking in account the environment level only if the environment is present.
+// ______________________________________________________________________________
+// _____________________________ Energy Conservation  ___________________________
+	// Apply energy conservation, taking into account the environment level only if
+	// the environment is present.
 	surfaceAlbedo.rgb = (1. - reflectance) * surfaceAlbedo.rgb;
 	surfaceAlbedo.rgb = (1. - reflectance) * surfaceAlbedo.rgb;
 
 
-	refractance *= vLightingIntensity.z;
-	environmentRadiance *= specularEnvironmentReflectance;
+// _____________________________ Diffuse ________________________________________
+	vec3 finalDiffuse = diffuseBase;
+	finalDiffuse.rgb += vAmbientColor;
+	finalDiffuse *= surfaceAlbedo.rgb;
+	finalDiffuse = max(finalDiffuse, 0.0);
 
 
-	// Emissive
-	vec3 surfaceEmissiveColor = vEmissiveColor;
-#ifdef EMISSIVE
-	vec3 emissiveColorTex = texture2D(emissiveSampler, vEmissiveUV + uvOffset).rgb;
-	surfaceEmissiveColor = toLinearSpace(emissiveColorTex.rgb) * surfaceEmissiveColor;
-	surfaceEmissiveColor *=  vEmissiveInfos.y;
+// _____________________________ Irradiance ______________________________________
+#ifdef REFLECTION
+	vec3 finalIrradiance = environmentIrradiance;
+	finalIrradiance *= surfaceAlbedo.rgb;
 #endif
 #endif
 
 
-#ifdef EMISSIVEFRESNEL
-	float emissiveFresnelTerm = computeFresnelTerm(viewDirectionW, normalW, emissiveRightColor.a, emissiveLeftColor.a);
-
-	surfaceEmissiveColor *= emissiveLeftColor.rgb * (1.0 - emissiveFresnelTerm) + emissiveFresnelTerm * emissiveRightColor.rgb;
+// _____________________________ Specular ________________________________________
+#ifdef SPECULARTERM
+	vec3 finalSpecular = specularBase;
+	finalSpecular *= surfaceReflectivityColor;
+	finalSpecular = max(finalSpecular, 0.0);
 #endif
 #endif
 
 
-	// Composition
-	vec3 finalDiffuse = lightDiffuseContribution;
-#ifndef EMISSIVEASILLUMINATION
-	finalDiffuse += surfaceEmissiveColor;
+// _____________________________ Radiance_________________________________________
+#ifdef REFLECTION
+	vec3 finalRadiance = environmentRadiance;
+	finalRadiance *= specularEnvironmentReflectance;
 #endif
 #endif
 
 
-finalDiffuse.rgb += vAmbientColor;
-finalDiffuse *= surfaceAlbedo.rgb;
-finalDiffuse = max(finalDiffuse, 0.0);
-finalDiffuse = (finalDiffuse * vLightingIntensity.x + surfaceAlbedo.rgb * environmentIrradiance) * ambientOcclusionColor;
+// _____________________________ Refraction ______________________________________
+#ifdef REFRACTION
+	vec3 finalRefraction = surfaceRefractionColor;
+	finalRefraction *= refractance;
+#endif
 
 
-float luminanceOverAlpha = 0.0;
-#ifdef RADIANCEOVERALPHA
-	luminanceOverAlpha += getLuminance(environmentRadiance);
+// _____________________________ Emissive ________________________________________
+	vec3 finalEmissive = vEmissiveColor;
+#ifdef EMISSIVE
+	vec3 emissiveColorTex = texture2D(emissiveSampler, vEmissiveUV + uvOffset).rgb;
+	finalEmissive *= toLinearSpace(emissiveColorTex.rgb);
+	finalEmissive *=  vEmissiveInfos.y;
 #endif
 #endif
 
 
-#ifdef SPECULARTERM
-	vec3 finalSpecular = lightSpecularContribution * surfaceReflectivityColor;
-	#ifdef SPECULAROVERALPHA
+// _____________________________ Highlights on Alpha _____________________________
+#ifdef ALPHABLEND
+	float luminanceOverAlpha = 0.0;
+	#ifdef RADIANCEOVERALPHA
+		luminanceOverAlpha += getLuminance(environmentRadiance);
+	#endif
+
+	#if defined(SPECULARTERM) && defined(SPECULAROVERALPHA)
 		luminanceOverAlpha += getLuminance(finalSpecular);
 		luminanceOverAlpha += getLuminance(finalSpecular);
 	#endif
 	#endif
-#else
-	vec3 finalSpecular = vec3(0.0);
-#endif
-finalSpecular *= vLightingIntensity.x;
 
 
-#if defined(RADIANCEOVERALPHA) || defined(SPECULAROVERALPHA)
-	alpha = clamp(alpha + luminanceOverAlpha * alpha, 0., 1.);
+	#if defined(RADIANCEOVERALPHA) || defined(SPECULAROVERALPHA)
+		alpha = clamp(alpha + luminanceOverAlpha * alpha, 0., 1.);
+	#endif
 #endif
 #endif
 
 
-// Composition
-// Reflection already includes the environment intensity.
-vec4 finalColor = vec4(finalDiffuse + finalSpecular + environmentRadiance + refractance, alpha);
-#ifdef EMISSIVEASILLUMINATION
-	finalColor.rgb += (surfaceEmissiveColor * vLightingIntensity.y);
+// _______________________________________________________________________________
+// _____________________________ Composition _____________________________________
+	// Reflection already includes the environment intensity.
+	vec4 finalColor = vec4(finalDiffuse			* ambientOcclusionColor * vLightingIntensity.x +
+#ifdef REFLECTION
+						finalIrradiance			* ambientOcclusionColor * vLightingIntensity.z +
 #endif
 #endif
+#ifdef SPECULARTERM
+						finalSpecular			* vLightingIntensity.x * vLightingIntensity.w +
+#endif
+#ifdef REFLECTION
+						finalRadiance			* vLightingIntensity.z +
+#endif
+#ifdef REFRACTION
+						finalRefraction			* vLightingIntensity.z +
+#endif
+						finalEmissive			* vLightingIntensity.y,
+						alpha);
 
 
+// _____________________________ Light Mapping _____________________________________
 #ifdef LIGHTMAP
 #ifdef LIGHTMAP
     #ifndef LIGHTMAPEXCLUDED
     #ifndef LIGHTMAPEXCLUDED
         #ifdef USELIGHTMAPASSHADOWMAP
         #ifdef USELIGHTMAPASSHADOWMAP
@@ -580,35 +598,24 @@ vec4 finalColor = vec4(finalDiffuse + finalSpecular + environmentRadiance + refr
     #endif
     #endif
 #endif
 #endif
 
 
+// _____________________________ Finally ___________________________________________
 	finalColor = max(finalColor, 0.0);
 	finalColor = max(finalColor, 0.0);
 
 
-#ifdef CAMERATONEMAP
-	finalColor.rgb = toneMaps(finalColor.rgb);
-#endif
-
 #include<logDepthFragment>
 #include<logDepthFragment>
 #include<fogFragment>(color, finalColor)
 #include<fogFragment>(color, finalColor)
 
 
-#ifdef CAMERACONTRAST
-	finalColor = contrasts(finalColor);
+#ifdef HDRLINEAROUTPUT
+	// Sanitize output in case invalid normals or tangents have caused a divide by 0 or undefined behavior.
+	// This also limits the brightness, which helpfully reduces over-sparkling in bloom (native handles this in the bloom blur shader).
+	finalColor.rgb = clamp(finalColor.rgb, 0., 30.0);
+#else
+	// Always applied, if only to ensure the output goes back to gamma space.
+	finalColor = applyImageProcessing(finalColor);
 #endif
 #endif
 
 
-#ifdef LDROUTPUT
-	finalColor.rgb = toGammaSpace(finalColor.rgb);
-
-	finalColor.rgb = clamp(finalColor.rgb, 0., 1.);
-
-	#ifdef CAMERACOLORGRADING
-		finalColor = colorGrades(finalColor);
-	#endif
-
-	#ifdef CAMERACOLORCURVES
-		finalColor.rgb = applyColorCurves(finalColor.rgb);
-	#endif
-#else
-	//sanitize output incase invalid normals or tangents have caused div by 0 or undefined behavior
-	//this also limits the brightness which helpfully reduces over-sparkling in bloom (native handles this in the bloom blur shader)
-	finalColor.rgb = clamp(finalColor.rgb, 0., 30.0);
+#ifdef PREMULTIPLYALPHA
+	// Convert to associated (premultiplied) alpha format if needed.
+	finalColor.rgb *= finalColor.a;
 #endif
 #endif
 
 
 	gl_FragColor = finalColor;
 	gl_FragColor = finalColor;
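The refactored composition weights each term with one channel of vLightingIntensity, as used above: x for direct lighting, y for emissive, z for the environment, and w as an extra specular factor. A scalar TypeScript mirror of that weighting is sketched below (illustrative only).

    // Illustrative sketch: scalar mirror of the composition block above.
    function composeScalar(diffuse: number, irradiance: number, specular: number,
                           radiance: number, refraction: number, emissive: number,
                           ambientOcclusion: number,
                           intensity: { x: number; y: number; z: number; w: number }): number {
        return diffuse * ambientOcclusion * intensity.x
             + irradiance * ambientOcclusion * intensity.z
             + specular * intensity.x * intensity.w
             + radiance * intensity.z
             + refraction * intensity.z
             + emissive * intensity.y;
    }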

+ 21 - 0
src/Tools/babylon.decorators.ts

@@ -68,6 +68,15 @@
         return generateSerializableMember(7, sourceName); // color curves
         return generateSerializableMember(7, sourceName); // color curves
     }
     }
 
 
+    export function serializeAsColor4(sourceName?: string) {
+        return generateSerializableMember(8, sourceName); // color 4
+    }
+
+    export function serializeAsImageProcessing(sourceName?: string) {
+        return generateSerializableMember(9, sourceName); // image processing
+    }
+
+
     export class SerializationHelper {
     export class SerializationHelper {
 
 
         public static Serialize<T>(entity: T, serializationObject?: any): any {
         public static Serialize<T>(entity: T, serializationObject?: any): any {
@@ -113,6 +122,12 @@
                         case 7:     // Color Curves
                         case 7:     // Color Curves
                             serializationObject[targetPropertyName] = sourceProperty.serialize();
                             serializationObject[targetPropertyName] = sourceProperty.serialize();
                             break;
                             break;
+                        case 8:     // Color 4
+                            serializationObject[targetPropertyName] = (<Color4>sourceProperty).asArray();
+                            break;
+                        case 9:     // Image Processing
+                            serializationObject[targetPropertyName] = (<ImageProcessing>sourceProperty).serialize();
+                            break;
                     }
                     }
                 }
                 }
             }
             }
@@ -160,6 +175,12 @@
                         case 7:     // Color Curves
                         case 7:     // Color Curves
                             destination[property] = ColorCurves.Parse(sourceProperty);
                             destination[property] = ColorCurves.Parse(sourceProperty);
                             break;
                             break;
+                        case 8:     // Color 4
+                            destination[property] = Color4.FromArray(sourceProperty);
+                            break;
+                        case 9:     // Image Processing
+                            destination[property] = ImageProcessing.Parse(sourceProperty);
+                            break;
                     }
                     }
                 }
                 }
             }
             }
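A hypothetical usage sketch of the two new decorators (not part of the patch); it assumes the class is serialized through SerializationHelper like other decorated Babylon classes, and the class and property names are invented for illustration.

    // Hypothetical example class round-tripping a Color4 and an ImageProcessing instance.
    class DemoSettings {
        @serializeAsColor4("tint")
        public tint = new Color4(1, 1, 1, 1);

        @serializeAsImageProcessing("imageProcessing")
        public imageProcessing = new ImageProcessing();
    }

    // const json = SerializationHelper.Serialize(new DemoSettings());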

+ 16 - 0
src/babylon.scene.ts

@@ -209,6 +209,19 @@
             this.markAllMaterialsAsDirty(Material.TextureDirtyFlag);
             this.markAllMaterialsAsDirty(Material.TextureDirtyFlag);
         }
         }
 
 
+        protected _imageProcessingConfiguration: ImageProcessing;
+        /**
+         * Default image processing configuration used either in the rendering
+         * of the forward main pass or through the imageProcessingPostProcess if present.
+         * As they are the same for the majority of the scene (multi-camera setups being the exception),
+         * it is easier to reference it from here than from every material and post process.
+         * 
+         * No setter as it is a shared configuration; set values on it directly instead.
+         */
+        public get imageProcessingConfiguration(): ImageProcessing {
+            return this._imageProcessingConfiguration;
+        }
+
         public forceWireframe = false;
         public forceWireframe = false;
         private _forcePointsCloud = false;
         private _forcePointsCloud = false;
         public set forcePointsCloud(value : boolean) {
         public set forcePointsCloud(value : boolean) {
@@ -792,6 +805,9 @@
 
 
             // Uniform Buffer
             // Uniform Buffer
             this._createUbo();
             this._createUbo();
+
+            // Default image processing configuration.
+            this._imageProcessingConfiguration = new ImageProcessing();
         }
         }
 
 
         // Properties
         // Properties
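A hypothetical usage sketch (not part of the patch) of the shared configuration exposed by the new getter; the property names contrast and exposure are assumptions based on the uniforms introduced elsewhere in this commit.

    // Because the configuration is shared, tweaking it on the scene affects every
    // material and post process that references it.
    function tuneSceneImageProcessing(scene: Scene): void {
        const config = scene.imageProcessingConfiguration; // no setter; mutate the shared instance
        config.contrast = 1.6;  // assumed property name
        config.exposure = 1.2;  // assumed property name
    }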