Merge pull request #2423 from sebavan/ImageProcessing

Image processing update
David Catuhe committed 8 years ago
commit aef8719c26
32 changed files with 1663 additions and 964 deletions
  1. Tools/Gulp/config.json (+36 -16)
  2. assets/textures/colorGrade.png (BIN)
  3. loaders/src/glTF/2.0/babylon.glTFLoader.ts (+0 -2)
  4. materialsLibrary/src/legacyPBR/babylon.legacyPBRMaterial.ts (+27 -3)
  5. materialsLibrary/src/legacyPBR/babylon.legacyPbrMaterial.js.include.fx (+4 -0)
  6. materialsLibrary/src/legacyPBR/legacyColorCurves.fx (+0 -0)
  7. materialsLibrary/src/legacyPBR/legacyColorCurvesDefinition.fx (+0 -0)
  8. materialsLibrary/src/legacyPBR/legacyColorGrading.fx (+0 -0)
  9. materialsLibrary/src/legacyPBR/legacyColorGradingDefinition.fx (+0 -0)
  10. materialsLibrary/src/legacyPBR/legacyPbr.fragment.fx (+18 -5)
  11. materialsLibrary/test/addlegacypbr.js (+1 -1)
  12. materialsLibrary/test/addpbr.js (+7 -2)
  13. src/Materials/PBR/babylon.pbrBaseMaterial.ts (+111 -247)
  14. src/Materials/PBR/babylon.pbrBaseSimpleMaterial.ts (+0 -2)
  15. src/Materials/PBR/babylon.pbrMaterial.ts (+138 -60)
  16. src/Materials/Textures/babylon.colorGradingTexture.ts (+7 -45)
  17. src/Materials/babylon.effect.ts (+4 -0)
  18. src/Materials/babylon.imageProcessingConfiguration.ts (+431 -0)
  19. src/Materials/babylon.materialHelper.ts (+5 -4)
  20. src/Materials/babylon.standardMaterial.ts (+167 -59)
  21. src/PostProcess/babylon.imageProcessingPostProcess.ts (+284 -145)
  22. src/Shaders/ShadersInclude/helperFunctions.fx (+31 -1)
  23. src/Shaders/ShadersInclude/imageProcessingDeclaration.fx (+24 -0)
  24. src/Shaders/ShadersInclude/imageProcessingFunctions.fx (+120 -0)
  25. src/Shaders/ShadersInclude/pbrFragmentDeclaration.fx (+0 -9)
  26. src/Shaders/ShadersInclude/pbrFunctions.fx (+11 -64)
  27. src/Shaders/ShadersInclude/pbrUboDeclaration.fx (+0 -3)
  28. src/Shaders/default.fragment.fx (+11 -13)
  29. src/Shaders/imageProcessing.fragment.fx (+14 -115)
  30. src/Shaders/pbr.fragment.fx (+175 -168)
  31. src/Tools/babylon.decorators.ts (+21 -0)
  32. src/babylon.scene.ts (+16 -0)

+ 36 - 16
Tools/Gulp/config.json

@@ -21,7 +21,8 @@
                 "additionalPostProcess_blur", "additionalPostProcess_fxaa", "additionalPostProcess_imageProcessing",
                 "bones", "hdr", "polygonMesh", "csg", "lensFlares", "physics", "textureFormats", "debug", "morphTargets",
                 "colorCurves", "octrees", "simd", "vr", "virtualJoystick", "optimizations", "highlights", "assetsManager",
-                "mapTexture", "dynamicFloatArray", "serialization", "probes", "layer", "textureTools"
+                "mapTexture", "dynamicFloatArray",
+                "imageProcessing", "serialization", "probes", "layer", "textureTools"
         ],
         "minimal": ["standardMaterial", "freeCamera", "hemisphericLight"],
         "minimalWithBuilder": ["meshBuilder", "standardMaterial", "freeCamera", "hemisphericLight"],
@@ -30,8 +31,10 @@
                 "shadows", "pointLight", "directionalLight", "spotLight",
                 "multiMaterial", "pbrMaterial",
                 "meshBuilder", "layer",
-                "additionalPostProcess_blur", "additionalPostProcess_fxaa", "additionalPostProcess_highlights", "additionalPostProcess_imageProcessing", "colorCurves", "defaultRenderingPipeline",
-                "debug", "textureTools"
+                "additionalPostProcess_blur", "additionalPostProcess_fxaa", "additionalPostProcess_highlights", 
+                "additionalPostProcess_imageProcessing", "colorCurves", "defaultRenderingPipeline", "imageProcessing",
+                "debug", "textureTools", "hdr",
+                "loader"
         ],
         "distributed": ["minimalViewer"]
     },
@@ -243,10 +246,8 @@
                 "shadowsFragmentFunctions",
                 "fresnelFunction",
                 "reflectionFunction",
-                "colorGradingDefinition",
-                "colorGrading",
-                "colorCurvesDefinition",
-                "colorCurves",
+                "imageProcessingDeclaration",
+                "imageProcessingFunctions",
                 "bumpFragmentFunctions",
                 "clipPlaneFragmentDeclaration",
                 "logDepthDeclaration",
@@ -298,12 +299,10 @@
                 "pbrUboDeclaration",
                 "fresnelFunction",
                 "reflectionFunction",
-                "colorGradingDefinition",
-                "colorCurvesDefinition",
                 "shadowsFragmentFunctions",
                 "pbrFunctions",
-                "colorGrading",
-                "colorCurves",
+                "imageProcessingDeclaration",
+                "imageProcessingFunctions",
                 "harmonicsFunctions",
                 "pbrLightFunctions",
                 "helperFunctions",
@@ -569,7 +568,7 @@
                 "instancesVertex",
                 "bonesVertex"
             ]
-        },            
+        },
         "postProcesses" : 
         {
             "files": [
@@ -638,8 +637,12 @@
             ], 
             "shaders" : [
                 "imageProcessing.fragment"
+            ],
+            "shaderIncludes": [
+                "imageProcessingDeclaration",
+                "imageProcessingFunctions"
             ]
-        },                                          
+        },
         "additionalPostProcesses" : 
         {
             "files": [
@@ -837,17 +840,30 @@
         {
             "files": [
                 "../../src/Materials/Textures/babylon.colorGradingTexture.js",
-                "../../src/Materials/babylon.colorCurves.js"                       
+                "../../src/Materials/babylon.colorCurves.js"
             ],
             "dependUpon" : [
                 "core"
             ]
         },
+        "imageProcessing" : 
+        {
+            "files": [
+                "../../src/Materials/babylon.imageProcessingConfiguration.js"
+            ],
+            "dependUpon" : [
+                "colorCurves"
+            ],
+            "shaderIncludes": [
+                "imageProcessingDeclaration",
+                "imageProcessingFunctions"
+            ]
+        },
         "octrees" : 
         {
             "files": [
                 "../../src/Culling/Octrees/babylon.octree.js",
-                "../../src/Culling/Octrees/babylon.octreeBlock.js"                       
+                "../../src/Culling/Octrees/babylon.octreeBlock.js"
             ],
             "dependUpon" : [
                 "core"
@@ -1167,7 +1183,11 @@
                     "../../materialsLibrary/src/legacyPBR/legacyPbrLightFunctions.fx",
                     "../../materialsLibrary/src/legacyPBR/legacyPbrLightFunctionsCall.fx",
                     "../../materialsLibrary/src/legacyPBR/legacyPbrUboDeclaration.fx",
-                    "../../materialsLibrary/src/legacyPBR/legacyPbrVertexDeclaration.fx"
+                    "../../materialsLibrary/src/legacyPBR/legacyPbrVertexDeclaration.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorCurves.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorCurvesDefinition.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorGrading.fx",
+                    "../../materialsLibrary/src/legacyPBR/legacyColorGradingDefinition.fx"
                 ],
                 "output": "babylon.legacyPbrMaterial.js"
             }

BIN
assets/textures/colorGrade.png


+ 0 - 2
loaders/src/glTF/2.0/babylon.glTFLoader.ts

@@ -879,14 +879,12 @@ module BABYLON.GLTF2 {
         public createPbrMaterial(material: IGLTFMaterial): void {
             var babylonMaterial = new PBRMaterial(material.name || "mat" + material.index, this._babylonScene);
             babylonMaterial.sideOrientation = Material.CounterClockWiseSideOrientation;
-            babylonMaterial.useScalarInLinearSpace = true;
             material.babylonMaterial = babylonMaterial;
         }
 
         public loadMaterialBaseProperties(material: IGLTFMaterial): void {
             var babylonMaterial = material.babylonMaterial as PBRMaterial;
 
-            babylonMaterial.useEmissiveAsIllumination = (material.emissiveFactor || material.emissiveTexture) ? true : false;
             babylonMaterial.emissiveColor = material.emissiveFactor ? Color3.FromArray(material.emissiveFactor) : new Color3(0, 0, 0);
             if (material.doubleSided) {
                 babylonMaterial.backFaceCulling = false;

+ 27 - 3
materialsLibrary/src/legacyPBR/babylon.legacyPBRMaterial.ts

@@ -1164,7 +1164,14 @@ module BABYLON {
                     ColorCurves.PrepareUniforms(uniforms);
                 }
                 if (this._defines.CAMERACOLORGRADING) {
-                    ColorGradingTexture.PrepareUniformsAndSamplers(uniforms, samplers);
+                    uniforms.push(
+                        "vCameraColorGradingInfos", 
+                        "vCameraColorGradingScaleOffset"
+                    );
+
+                    samplers.push(
+                        "cameraColorGrading2DSampler"
+                    );
                 }
                 MaterialHelper.PrepareUniformsAndSamplersList(<EffectCreationOptions>{
                     uniformsNames: uniforms, 
@@ -1527,7 +1534,24 @@ module BABYLON {
                     }
 
                     if (this.cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        ColorGradingTexture.Bind(this.cameraColorGradingTexture, this._effect);
+                        this._effect.setTexture("cameraColorGrading2DSampler", this.cameraColorGradingTexture);
+
+                        let x = this.cameraColorGradingTexture.level;                 // Texture Level
+                        let y = this.cameraColorGradingTexture.getSize().height;      // Texture Size example with 8
+                        let z = y - 1.0;                    // SizeMinusOne 8 - 1
+                        let w = 1 / y;                      // Space of 1 slice 1 / 8
+                        
+                        this._effect.setFloat4("vCameraColorGradingInfos", x, y, z, w);
+                        
+                        let slicePixelSizeU = w / y;    // Space of 1 pixel in U direction, e.g. 1/64
+                        let slicePixelSizeV = w;		// Space of 1 pixel in V direction, e.g. 1/8
+                        
+                        let x2 = z * slicePixelSizeU;   // Extent of lookup range in U for a single slice so that range corresponds to (size-1) texels, for example 7/64
+                        let y2 = z / y;	                // Extent of lookup range in V for a single slice so that range corresponds to (size-1) texels, for example 7/8
+                        let z2 = 0.5 * slicePixelSizeU;	// Offset of lookup range in U to align sample position with texel centre, for example 0.5/64 
+                        let w2 = 0.5 * slicePixelSizeV;	// Offset of lookup range in V to align sample position with texel centre, for example 0.5/8
+                        
+                        this._effect.setFloat4("vCameraColorGradingScaleOffset", x2, y2, z2, w2);
                     }
                 }
 
@@ -1545,7 +1569,7 @@ module BABYLON {
 
                 // Lights
                 if (this._myScene.lightsEnabled && !this.disableLighting) {
-                    PBRMaterial.BindLights(this._myScene, mesh, this._effect, this._defines, this.useScalarInLinearSpace, this.maxSimultaneousLights, this.usePhysicalLightFalloff);
+                    LegacyPBRMaterial.BindLights(this._myScene, mesh, this._effect, this._defines, this.useScalarInLinearSpace, this.maxSimultaneousLights, this.usePhysicalLightFalloff);
                 }
 
                 // View
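
The values written into vCameraColorGradingInfos and vCameraColorGradingScaleOffset above follow directly from the layout of the 2D LUT ("size" slices of size x size texels laid out side by side). A minimal sketch of that math as a standalone helper (hypothetical name, not part of this PR), using only the texture level and height that the diff above reads:

    // Hypothetical helper mirroring the inlined binding math above.
    // For a LUT of height 8 the texture is 64x8: 8 slices of 8x8 texels.
    function computeColorGradingBindValues(level: number, size: number) {
        const sizeMinusOne = size - 1;            // e.g. 7
        const sliceSize = 1 / size;               // V extent of one slice, e.g. 1/8
        const slicePixelSizeU = sliceSize / size; // one texel in U, e.g. 1/64
        const slicePixelSizeV = sliceSize;        // one texel in V, e.g. 1/8

        return {
            // vCameraColorGradingInfos: level, size, size - 1, 1 / size
            infos: [level, size, sizeMinusOne, sliceSize],
            // vCameraColorGradingScaleOffset: scale the lookup over (size - 1)
            // texels and offset by half a texel to sample texel centres.
            scaleOffset: [
                sizeMinusOne * slicePixelSizeU, // e.g. 7/64
                sizeMinusOne / size,            // e.g. 7/8
                0.5 * slicePixelSizeU,          // e.g. 0.5/64
                0.5 * slicePixelSizeV           // e.g. 0.5/8
            ]
        };
    }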

File diff suppressed because it is too large
+ 4 - 0
materialsLibrary/src/legacyPBR/babylon.legacyPbrMaterial.js.include.fx


src/Shaders/ShadersInclude/colorCurves.fx → materialsLibrary/src/legacyPBR/legacyColorCurves.fx


src/Shaders/ShadersInclude/colorCurvesDefinition.fx → materialsLibrary/src/legacyPBR/legacyColorCurvesDefinition.fx


src/Shaders/ShadersInclude/colorGrading.fx → materialsLibrary/src/legacyPBR/legacyColorGrading.fx


src/Shaders/ShadersInclude/colorGradingDefinition.fx → materialsLibrary/src/legacyPBR/legacyColorGradingDefinition.fx


+ 18 - 5
materialsLibrary/src/legacyPBR/legacyPbr.fragment.fx

@@ -105,11 +105,11 @@ varying vec3 vDirectionW;
 #endif
 
 #ifdef CAMERACOLORGRADING
-	#include<colorGradingDefinition>
+	#include<legacyColorGradingDefinition>
 #endif
 
 #ifdef CAMERACOLORCURVES
-	#include<colorCurvesDefinition>
+	#include<legacyColorCurvesDefinition>
 #endif
 
 // PBR
@@ -117,17 +117,30 @@ varying vec3 vDirectionW;
 #include<legacyPbrFunctions>
 
 #ifdef CAMERACOLORGRADING
-	#include<colorGrading>
+	#include<legacyColorGrading>
 #endif
 
 #ifdef CAMERACOLORCURVES
-	#include<colorCurves>
+	#include<legacyColorCurves>
 #endif
 
 #include<harmonicsFunctions>
 #include<legacyPbrLightFunctions>
 
-#include<helperFunctions>
+mat3 transposeMat3(mat3 inMatrix) {
+	vec3 i0 = inMatrix[0];
+	vec3 i1 = inMatrix[1];
+	vec3 i2 = inMatrix[2];
+
+	mat3 outMatrix = mat3(
+		vec3(i0.x, i1.x, i2.x),
+		vec3(i0.y, i1.y, i2.y),
+		vec3(i0.z, i1.z, i2.z)
+		);
+
+	return outMatrix;
+}
+
 #include<bumpFragmentFunctions>
 #include<clipPlaneFragmentDeclaration>
 #include<logDepthDeclaration>

+ 1 - 1
materialsLibrary/test/addlegacypbr.js

@@ -7,7 +7,7 @@ window.prepareLegacyPBR = function() {
     
     var hdrTexture = new BABYLON.HDRCubeTexture("../assets/textures/hdr/environment.hdr", scene, 512);
 
-    var colorGradingTexture = new BABYLON.ColorGradingTexture("../assets/textures/ColorGrading.3DL", scene);
+    //var colorGradingTexture = new BABYLON.ColorGradingTexture("../assets/textures/ColorGrading.3DL", scene);
     
     // Uncomment for PMREM Generation
     // var hdrTexture = new BABYLON.HDRCubeTexture("textures/hdr/environment.hdr", scene, 128, false, true, false, true);

+ 7 - 2
materialsLibrary/test/addpbr.js

@@ -7,7 +7,7 @@ window.preparePBR = function() {
     
     var hdrTexture = new BABYLON.HDRCubeTexture("../assets/textures/hdr/environment.hdr", scene, 512);
 
-    var colorGradingTexture = new BABYLON.ColorGradingTexture("../assets/textures/ColorGrading.3DL", scene);
+    //var colorGradingTexture = new BABYLON.ColorGradingTexture("../assets/textures/ColorGrading.3DL", scene);
     
     // Uncomment for PMREM Generation
     // var hdrTexture = new BABYLON.HDRCubeTexture("textures/hdr/environment.hdr", scene, 128, false, true, false, true);
@@ -15,7 +15,12 @@ window.preparePBR = function() {
     pbr.refractionTexture = hdrTexture;
     pbr.linkRefractionWithTransparency = true;
     pbr.indexOfRefraction = 0.52;
-    
+
+	// var colorGrading = new BABYLON.ColorGradingTexture("../assets/textures/lateSunset.3DL", scene);
+	var colorGrading = new BABYLON.Texture("../assets/textures/colorGrade.png", scene, true);
+	pbr.cameraColorGradingTexture = colorGrading;
+	pbr.colorGradingEnabled = true;
+
 	pbr.reflectivityColor = new BABYLON.Color3(0.3, 0.3, 0.3);
 	pbr.microSurface = 0.9;
     

+ 111 - 247
src/Materials/PBR/babylon.pbrBaseMaterial.ts

@@ -1,5 +1,5 @@
 module BABYLON {
-    class PBRMaterialDefines extends MaterialDefines {
+    class PBRMaterialDefines extends MaterialDefines implements IImageProcessingConfigurationDefines {
         public PBR = true;
         public ALBEDO = false;
         public AMBIENT = false;
@@ -20,9 +20,6 @@
         public POINTSIZE = false;
         public FOG = false;
         public SPECULARTERM = false;
-        public OPACITYFRESNEL = false;
-        public EMISSIVEFRESNEL = false;
-        public FRESNEL = false;
         public NORMAL = false;
         public TANGENT = false;
         public UV1 = false;
@@ -34,7 +31,6 @@
         public INSTANCES = false;
         public MICROSURFACEFROMREFLECTIVITYMAP = false;
         public MICROSURFACEAUTOMATIC = false;
-        public EMISSIVEASILLUMINATION = false;
         public LIGHTMAP = false;
         public USELIGHTMAPASSHADOWMAP = false;
         public REFLECTIONMAP_3D = false;
@@ -49,10 +45,6 @@
         public REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED = false;
         public INVERTCUBICMAP = false;
         public LOGARITHMICDEPTH = false;
-        public CAMERATONEMAP = false;
-        public CAMERACONTRAST = false;
-        public CAMERACOLORGRADING = false;
-        public CAMERACOLORCURVES = false;
         public USESPHERICALFROMREFLECTIONMAP = false;
         public REFRACTION = false;
         public REFRACTIONMAP_3D = false;
@@ -84,7 +76,21 @@
         public NUM_MORPH_INFLUENCERS = 0;
         
         public ALPHATESTVALUE = 0.5;
-        public LDROUTPUT = true;
+        public PREMULTIPLYALPHA = false;
+        public ALPHAFRESNEL = false;
+
+        public IMAGEPROCESSING = false;
+        public VIGNETTE = false;
+        public VIGNETTEBLENDMODEMULTIPLY = false;
+        public VIGNETTEBLENDMODEOPAQUE = false;
+        public TONEMAPPING = false;
+        public CONTRAST = false;
+        public COLORCURVES = false;
+        public COLORGRADING = false;
+        public SAMPLER3DGREENDEPTH = false;
+        public SAMPLER3DBGRMAP = false;
+        public IMAGEPROCESSINGPOSTPROCESS = false;
+        public EXPOSURE = false;
 
         constructor() {
             super();
@@ -139,37 +145,6 @@
         protected _disableBumpMap: boolean = false;
 
         /**
-         * The camera exposure used on this material.
-         * This property is here and not in the camera to allow controlling exposure without full screen post process.
-         * This corresponds to a photographic exposure.
-         */
-        protected _cameraExposure: number = 1.0;
-        
-        /**
-         * The camera contrast used on this material.
-         * This property is here and not in the camera to allow controlling contrast without full screen post process.
-         */
-        protected _cameraContrast: number = 1.0;
-        
-        /**
-         * Color Grading 2D Lookup Texture.
-         * This allows special effects like sepia, black and white to sixties rendering style. 
-         */
-        protected _cameraColorGradingTexture: BaseTexture = null;
-        
-        /**
-         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
-         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
-         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
-         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
-         */
-        protected _cameraColorCurves: ColorCurves = null;
-         
-        private _cameraInfos: Vector4 = new Vector4(1.0, 1.0, 0.0, 0.0);
-
-        private _microsurfaceTextureLods: Vector2 = new Vector2(0.0, 0.0);
-
-        /**
          * AKA Diffuse Texture in standard nomenclature.
          */
         protected _albedoTexture: BaseTexture;
@@ -188,6 +163,8 @@
 
         protected _reflectionTexture: BaseTexture;
 
+        private _microsurfaceTextureLods: Vector2 = new Vector2(0.0, 0.0);
+
         protected _emissiveTexture: BaseTexture;
         
         /**
@@ -255,10 +232,6 @@
          */
         protected _invertRefractionY = false;
 
-        protected _opacityFresnelParameters: FresnelParameters;
-
-        protected _emissiveFresnelParameters: FresnelParameters;
-
         /**
          * This parameters will make the material used its opacity to control how much it is refracting aginst not.
          * Materials half opaque for instance using refraction could benefit from this control.
@@ -268,13 +241,7 @@
         protected _useLightmapAsShadowmap = false;
         
         /**
-         * In this mode, the emissive informtaion will always be added to the lighting once.
-         * A light for instance can be thought as emissive.
-         */
-        protected _useEmissiveAsIllumination = false;
-        
-        /**
-         * Secifies that the alpha is coming form the albedo channel alpha channel.
+         * Specifies that the alpha is coming form the albedo channel alpha channel.
          */
         protected _useAlphaFromAlbedoTexture = false;
         
@@ -321,12 +288,6 @@
         protected _useAutoMicroSurfaceFromReflectivityMap = false;
         
         /**
-         * Allows to work with scalar in linear mode. This is definitely a matter of preferences and tools used during
-         * the creation of the material.
-         */
-        protected _useScalarInLinearSpace = false;
-        
-        /**
          * BJS is using an harcoded light falloff based on a manually sets up range.
          * In PBR, one way to represents the fallof is to use the inverse squared root algorythm.
          * This parameter can help you switch back to the BJS mode in order to create scenes using both materials.
@@ -390,10 +351,55 @@
         protected _forceAlphaTest = false;
 
         /**
-         * If false, it allows the output of the shader to be in hdr space (e.g. more than one) which is useful
-         * in combination of post process in float or half float mode.
+         * Specifies that the alpha is premultiplied before output (this enables alpha premultiplied blending).
+         * in your scene composition.
          */
-        protected _ldrOutput = true;
+        protected _premultiplyAlpha = false;
+
+        /**
+         * A fresnel is applied to the alpha of the model to ensure grazing angles edges are not alpha tested.
+         * And/Or occlude the blended part.
+         */
+        protected _useAlphaFresnel = false;
+
+        /**
+         * Default configuration related to image processing available in the PBR Material.
+         */
+        @serializeAsImageProcessingConfiguration()
+        protected _imageProcessingConfiguration: ImageProcessingConfiguration;
+
+        /**
+         * Keep track of the image processing observer to allow dispose and replace.
+         */
+        private _imageProcessingObserver: Observer<ImageProcessingConfiguration>;
+
+        /**
+         * Attaches a new image processing configuration to the PBR Material.
+         * @param configuration 
+         */
+        protected _attachImageProcessingConfiguration(configuration: ImageProcessingConfiguration): void {
+            if (configuration === this._imageProcessingConfiguration) {
+                return;
+            }
+
+            // Detaches observer.
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
+            // Pick the scene configuration if needed.
+            if (!configuration) {
+                this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration;
+            }
+            else {
+                this._imageProcessingConfiguration = configuration;
+            }
+
+            // Attaches observer.
+            this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(conf => {
+                this._markAllSubMeshesAsTexturesDirty();
+            });
+        }
 
         private _renderTargets = new SmartArray<RenderTargetTexture>(16);
         private _worldViewProjectionMatrix = Matrix.Zero();
@@ -411,6 +417,9 @@
         constructor(name: string, scene: Scene) {
             super(name, scene);
 
+            // Setup the default processing configuration to the scene.
+            this._attachImageProcessingConfiguration(null);
+
             this.getRenderTargetTextures = (): SmartArray<RenderTargetTexture> => {
                 this._renderTargets.reset();
 
@@ -441,7 +450,7 @@
             if (this._linkRefractionWithTransparency) {
                 return false;
             }
-            return (this.alpha < 1.0) || (this._opacityTexture != null) || this._shouldUseAlphaFromAlbedoTexture() || this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled;
+            return (this.alpha < 1.0) || (this._opacityTexture != null) || this._shouldUseAlphaFromAlbedoTexture();
         }
 
         public needAlphaTesting(): boolean {
@@ -463,60 +472,7 @@
             return this._albedoTexture;
         }
 
-        private convertColorToLinearSpaceToRef(color: Color3, ref: Color3): void {
-            PBRMaterial.convertColorToLinearSpaceToRef(color, ref, this._useScalarInLinearSpace);
-        }
-
-        private static convertColorToLinearSpaceToRef(color: Color3, ref: Color3, useScalarInLinear: boolean): void {
-            if (!useScalarInLinear) {
-                color.toLinearSpaceToRef(ref);
-            } else {
-                ref.r = color.r;
-                ref.g = color.g;
-                ref.b = color.b;
-            }
-        }
-
-        private static _scaledAlbedo = new Color3();
         private static _scaledReflectivity = new Color3();
-        private static _scaledEmissive = new Color3();
-        private static _scaledReflection = new Color3();
-
-        public static BindLights(scene: Scene, mesh: AbstractMesh, effect: Effect, defines: MaterialDefines, useScalarInLinearSpace: boolean, maxSimultaneousLights: number, usePhysicalLightFalloff: boolean) {
-            var lightIndex = 0;
-            for (var light of mesh._lightSources) {
-                let useUbo = light._uniformBuffer.useUbo;
-                let scaledIntensity = light.getScaledIntensity();
-
-                light._uniformBuffer.bindToEffect(effect, "Light" + lightIndex);
-                MaterialHelper.BindLightProperties(light, effect, lightIndex);
-
-                // GAMMA CORRECTION.
-                this.convertColorToLinearSpaceToRef(light.diffuse, PBRMaterial._scaledAlbedo, useScalarInLinearSpace);
-
-                PBRMaterial._scaledAlbedo.scaleToRef(scaledIntensity, PBRMaterial._scaledAlbedo);
-                light._uniformBuffer.updateColor4(useUbo ? "vLightDiffuse" : "vLightDiffuse" + lightIndex, PBRMaterial._scaledAlbedo, usePhysicalLightFalloff ? light.radius : light.range);
-
-                if (defines["SPECULARTERM"]) {
-                    this.convertColorToLinearSpaceToRef(light.specular, PBRMaterial._scaledReflectivity, useScalarInLinearSpace);
-
-                    PBRMaterial._scaledReflectivity.scaleToRef(scaledIntensity, PBRMaterial._scaledReflectivity);
-                    light._uniformBuffer.updateColor3(useUbo ? "vLightSpecular" : "vLightSpecular" + lightIndex, PBRMaterial._scaledReflectivity);
-                }
-
-                // Shadows
-                if (scene.shadowsEnabled) {
-                    MaterialHelper.BindLightShadow(light, scene, mesh, lightIndex + "", effect);
-                }
-
-                light._uniformBuffer.update();
-
-                lightIndex++;
-
-                if (lightIndex === maxSimultaneousLights)
-                    break;
-            }
-        }
 
         public isReadyForSubMesh(mesh: AbstractMesh, subMesh: SubMesh, useInstances?: boolean): boolean { 
             if (this.isFrozen) {
@@ -551,8 +507,6 @@
                         defines.LODBASEDMICROSFURACE = true;
                     }
 
-                    defines.LDROUTPUT = this._ldrOutput;
-
                     if (this._albedoTexture && StandardMaterial.DiffuseTextureEnabled) {
                         if (!this._albedoTexture.isReadyOrNotBlocking()) {
                             return false;
@@ -745,79 +699,44 @@
                             }
                         }
                     }
-                
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        // Color Grading texure can not be none blocking.
-                        if (!this._cameraColorGradingTexture.isReady()) {
-                            return false;
-                        }
-                        
-                        defines.CAMERACOLORGRADING = true;
-                    }
-
-                    if (!this.backFaceCulling && this._twoSidedLighting) {
-                        defines.TWOSIDEDLIGHTING = true;
-                    }
 
                     if (this._shouldUseAlphaFromAlbedoTexture()) {
                         defines.ALPHAFROMALBEDO = true;
                     }
 
-                    if (this._useEmissiveAsIllumination) {
-                        defines.EMISSIVEASILLUMINATION = true;
-                    }
-
-                    if (this._cameraContrast != 1) {
-                        defines.CAMERACONTRAST = true;
-                    }
-
-                    if (this._cameraExposure != 1) {
-                        defines.CAMERATONEMAP = true;
-                    }
-                    
-                    if (this._cameraColorCurves) {
-                        defines.CAMERACOLORCURVES = true;
-                    }
-
-                    if (this._useSpecularOverAlpha) {
-                        defines.SPECULAROVERALPHA = true;
-                    }
-
-                    if (this._usePhysicalLightFalloff) {
-                        defines.USEPHYSICALLIGHTFALLOFF = true;
-                    }
+                }
 
-                    if (this._useRadianceOverAlpha) {
-                        defines.RADIANCEOVERALPHA = true;
-                    }
+                if (this._useSpecularOverAlpha) {
+                    defines.SPECULAROVERALPHA = true;
+                }
 
-                    if ((this._metallic !== undefined && this._metallic !== null) || (this._roughness !== undefined && this._roughness !== null)) {
-                        defines.METALLICWORKFLOW = true;
-                    }   
+                if (this._usePhysicalLightFalloff) {
+                    defines.USEPHYSICALLIGHTFALLOFF = true;
+                }
 
-                    defines.ALPHATESTVALUE = this._alphaCutOff;
-                    defines.ALPHABLEND = this.needAlphaBlending();
+                if (this._useRadianceOverAlpha) {
+                    defines.RADIANCEOVERALPHA = true;
                 }
-            }
 
-            if (defines._areFresnelDirty) {
-                if (StandardMaterial.FresnelEnabled) {
-                    // Fresnel
-                    if (this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled ||
-                        this._emissiveFresnelParameters && this._emissiveFresnelParameters.isEnabled) {
+                if ((this._metallic !== undefined && this._metallic !== null) || (this._roughness !== undefined && this._roughness !== null)) {
+                    defines.METALLICWORKFLOW = true;
+                }
 
-                        if (this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled) {
-                            defines.OPACITYFRESNEL = true;
-                        }
+                if (!this.backFaceCulling && this._twoSidedLighting) {
+                    defines.TWOSIDEDLIGHTING = true;
+                }
 
-                        if (this._emissiveFresnelParameters && this._emissiveFresnelParameters.isEnabled) {
-                            defines.EMISSIVEFRESNEL = true;
-                        }
+                defines.ALPHATESTVALUE = this._alphaCutOff;
+                defines.PREMULTIPLYALPHA = this._premultiplyAlpha;
+                defines.ALPHABLEND = this.needAlphaBlending();
+                defines.ALPHAFRESNEL = this._useAlphaFresnel;
 
-                        defines.FRESNEL = true;
-                    }
+                if (!this._imageProcessingConfiguration.isReady()) {
+                    return false;
                 }
-            }            
+
+                this._imageProcessingConfiguration.prepareDefines(defines);
+            }
 
             // Misc.
             MaterialHelper.PrepareDefinesForMisc(mesh, scene, this._useLogarithmicDepth, this.pointsCloud, this.fogEnabled, defines);
@@ -835,7 +754,7 @@
                 }
             }
 
-            // Get correct effect      
+            // Get correct effect
             if (defines.isDirty) {
                 defines.markAsProcessed();
                 scene.resetCachedMaterial();
@@ -888,18 +807,6 @@
                     fallbacks.addFallback(0, "SPECULARTERM");
                 }
 
-                if (defines.OPACITYFRESNEL) {
-                    fallbacks.addFallback(1, "OPACITYFRESNEL");
-                }
-
-                if (defines.EMISSIVEFRESNEL) {
-                    fallbacks.addFallback(2, "EMISSIVEFRESNEL");
-                }
-
-                if (defines.FRESNEL) {
-                    fallbacks.addFallback(3, "FRESNEL");
-                }
-
                 if (defines.NUM_BONE_INFLUENCERS > 0) {
                     fallbacks.addCPUSkinningFallback(0, mesh);
                 }
@@ -936,25 +843,20 @@
                         "vAlbedoInfos", "vAmbientInfos", "vOpacityInfos", "vReflectionInfos", "vEmissiveInfos", "vReflectivityInfos", "vMicroSurfaceSamplerInfos", "vBumpInfos", "vLightmapInfos", "vRefractionInfos",
                         "mBones",
                         "vClipPlane", "albedoMatrix", "ambientMatrix", "opacityMatrix", "reflectionMatrix", "emissiveMatrix", "reflectivityMatrix", "microSurfaceSamplerMatrix", "bumpMatrix", "lightmapMatrix", "refractionMatrix",
-                        "opacityParts", "emissiveLeftColor", "emissiveRightColor",
                         "vLightingIntensity",
                         "logarithmicDepthConstant",
                         "vSphericalX", "vSphericalY", "vSphericalZ",
                         "vSphericalXX", "vSphericalYY", "vSphericalZZ",
                         "vSphericalXY", "vSphericalYZ", "vSphericalZX",
-                        "vMicrosurfaceTextureLods",
-                        "vCameraInfos"
+                        "vMicrosurfaceTextureLods"
                 ];
 
                 var samplers = ["albedoSampler", "ambientSampler", "opacitySampler", "reflectionCubeSampler", "reflection2DSampler", "emissiveSampler", "reflectivitySampler", "microSurfaceSampler", "bumpSampler", "lightmapSampler", "refractionCubeSampler", "refraction2DSampler"];
                 var uniformBuffers = ["Material", "Scene"];
 
-                if (defines.CAMERACOLORCURVES) {
-                    ColorCurves.PrepareUniforms(uniforms);
-                }
-                if (defines.CAMERACOLORGRADING) {
-                    ColorGradingTexture.PrepareUniformsAndSamplers(uniforms, samplers);
-                }
+                ImageProcessingConfiguration.PrepareUniforms(uniforms, defines);
+                ImageProcessingConfiguration.PrepareSamplers(samplers, defines);
+
                 MaterialHelper.PrepareUniformsAndSamplersList(<EffectCreationOptions>{
                     uniformsNames: uniforms, 
                     uniformBuffersNames: uniformBuffers,
@@ -1028,9 +930,6 @@
             this._uniformBuffer.addUniform("vMicrosurfaceTextureLods", 2);
             this._uniformBuffer.addUniform("vReflectivityColor", 4);
             this._uniformBuffer.addUniform("vEmissiveColor", 3);
-            this._uniformBuffer.addUniform("opacityParts", 4);
-            this._uniformBuffer.addUniform("emissiveLeftColor", 4);
-            this._uniformBuffer.addUniform("emissiveRightColor", 4);
 
             this._uniformBuffer.addUniform("pointSize", 1);
             this._uniformBuffer.create();
@@ -1077,18 +976,6 @@
 
                 if (!this._uniformBuffer.useUbo || !this.isFrozen || !this._uniformBuffer.isSync) {
 
-                    // Fresnel
-                    if (StandardMaterial.FresnelEnabled) {
-                        if (this._opacityFresnelParameters && this._opacityFresnelParameters.isEnabled) {
-                            this._uniformBuffer.updateColor4("opacityParts", new Color3(this._opacityFresnelParameters.leftColor.toLuminance(), this._opacityFresnelParameters.rightColor.toLuminance(), this._opacityFresnelParameters.bias), this._opacityFresnelParameters.power);
-                        }
-
-                        if (this._emissiveFresnelParameters && this._emissiveFresnelParameters.isEnabled) {
-                            this._uniformBuffer.updateColor4("emissiveLeftColor", this._emissiveFresnelParameters.leftColor, this._emissiveFresnelParameters.power);
-                            this._uniformBuffer.updateColor4("emissiveRightColor", this._emissiveFresnelParameters.rightColor, this._emissiveFresnelParameters.bias);
-                        }
-                    }
-
                     // Texture uniforms      
                     if (scene.texturesEnabled) {
                         if (this._albedoTexture && StandardMaterial.DiffuseTextureEnabled) {
@@ -1205,22 +1092,12 @@
                         this._uniformBuffer.updateColor4("vReflectivityColor", PBRMaterial._scaledReflectivity, 0);
                     }
                     else {
-                        // GAMMA CORRECTION.
-                        this.convertColorToLinearSpaceToRef(this._reflectivityColor, PBRMaterial._scaledReflectivity);
-                        this._uniformBuffer.updateColor4("vReflectivityColor", PBRMaterial._scaledReflectivity, this._microSurface);
+                        this._uniformBuffer.updateColor4("vReflectivityColor", this._reflectivityColor, this._microSurface);
                     }
 
-                    // GAMMA CORRECTION.
-                    this.convertColorToLinearSpaceToRef(this._emissiveColor, PBRMaterial._scaledEmissive);
-                    this._uniformBuffer.updateColor3("vEmissiveColor", PBRMaterial._scaledEmissive);
-
-                    // GAMMA CORRECTION.
-                    this.convertColorToLinearSpaceToRef(this._reflectionColor, PBRMaterial._scaledReflection);
-                    this._uniformBuffer.updateColor3("vReflectionColor", PBRMaterial._scaledReflection);
-
-                    // GAMMA CORRECTION.
-                    this.convertColorToLinearSpaceToRef(this._albedoColor, PBRMaterial._scaledAlbedo);
-                    this._uniformBuffer.updateColor4("vAlbedoColor", PBRMaterial._scaledAlbedo, this.alpha * mesh.visibility);
+                    this._uniformBuffer.updateColor3("vEmissiveColor", this._emissiveColor);
+                    this._uniformBuffer.updateColor3("vReflectionColor", this._reflectionColor);
+                    this._uniformBuffer.updateColor4("vAlbedoColor", this._albedoColor, this.alpha * mesh.visibility);
 
 
                     // Misc
@@ -1286,10 +1163,6 @@
                             this._uniformBuffer.setTexture("refraction2DSampler", this._refractionTexture);
                         }
                     }
-
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        ColorGradingTexture.Bind(this._cameraColorGradingTexture, this._activeEffect);
-                    }
                 }
 
                 // Clip plane
@@ -1305,7 +1178,7 @@
             if (this._mustRebind(scene, effect) || !this.isFrozen) {
                 // Lights
                 if (scene.lightsEnabled && !this._disableLighting) {
-                    PBRMaterial.BindLights(scene, mesh, this._activeEffect, defines, this._useScalarInLinearSpace, this._maxSimultaneousLights, this._usePhysicalLightFalloff);
+                    MaterialHelper.BindLights(scene, mesh, this._activeEffect, defines, this._maxSimultaneousLights, this._usePhysicalLightFalloff);
                 }
 
                 // View
@@ -1321,13 +1194,8 @@
                     MaterialHelper.BindMorphTargetParameters(mesh, this._activeEffect);
                 }
 
-                this._cameraInfos.x = this._cameraExposure;
-                this._cameraInfos.y = this._cameraContrast;
-                effect.setVector4("vCameraInfos", this._cameraInfos);
-                
-                if (this._cameraColorCurves) {
-                    ColorCurves.Bind(this._cameraColorCurves, this._activeEffect);
-                }
+                // image processing
+                this._imageProcessingConfiguration.bind(this._activeEffect);
 
                 // Log. depth
                 MaterialHelper.BindLogDepth(defines, this._activeEffect, scene);
@@ -1381,10 +1249,6 @@
             if (this._refractionTexture && this._refractionTexture.animations && this._refractionTexture.animations.length > 0) {
                 results.push(this._refractionTexture);
             }
-            
-            if (this._cameraColorGradingTexture && this._cameraColorGradingTexture.animations && this._cameraColorGradingTexture.animations.length > 0) {
-                results.push(this._cameraColorGradingTexture);
-            }
 
             return results;
         }
@@ -1430,14 +1294,14 @@
                 if (this._refractionTexture) {
                     this._refractionTexture.dispose();
                 }
-                
-                if (this._cameraColorGradingTexture) {
-                    this._cameraColorGradingTexture.dispose();
-                }
             }
 
             this._renderTargets.dispose();
 
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
             super.dispose(forceDisposeEffect, forceDisposeTextures);
         }
     }
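
The new _attachImageProcessingConfiguration/_imageProcessingObserver pair above follows a simple lifecycle: detach the observer from the previous configuration, fall back to the scene-wide configuration when null is passed, re-attach an observer that marks the material dirty, and remove it again in dispose(). A sketch of the same pattern in a hypothetical consumer (only the names taken from the diff are real; the rest is illustrative):

    // Hypothetical consumer of ImageProcessingConfiguration, not part of the PR.
    class ImageProcessingUser {
        private _config: BABYLON.ImageProcessingConfiguration;
        private _observer: BABYLON.Observer<BABYLON.ImageProcessingConfiguration>;

        constructor(private _scene: BABYLON.Scene) {
            this._attach(null); // start on the scene-wide configuration
        }

        private _attach(configuration: BABYLON.ImageProcessingConfiguration): void {
            if (configuration === this._config) {
                return;
            }
            // Detach from the previously attached configuration, if any.
            if (this._config && this._observer) {
                this._config.onUpdateParameters.remove(this._observer);
            }
            // Fall back to the scene configuration when none is provided.
            this._config = configuration || this._scene.imageProcessingConfiguration;
            // React to parameter changes, e.g. by rebuilding effects.
            this._observer = this._config.onUpdateParameters.add(() => {
                /* mark effects/submeshes dirty here */
            });
        }

        public dispose(): void {
            // Mirror of the dispose() change above: always remove the observer.
            if (this._config && this._observer) {
                this._config.onUpdateParameters.remove(this._observer);
            }
        }
    }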

+ 0 - 2
src/Materials/PBR/babylon.pbrBaseSimpleMaterial.ts

@@ -171,9 +171,7 @@
         constructor(name: string, scene: Scene) {
             super(name, scene);
 
-            this._useEmissiveAsIllumination = true;
             this._useAmbientInGrayScale = true;
-            this._useScalarInLinearSpace = true;
         }
     }
 }

+ 138 - 60
src/Materials/PBR/babylon.pbrMaterial.ts

@@ -80,41 +80,6 @@
         public disableBumpMap: boolean = false;
 
         /**
-         * The camera exposure used on this material.
-         * This property is here and not in the camera to allow controlling exposure without full screen post process.
-         * This corresponds to a photographic exposure.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraExposure: number = 1.0;
-        
-        /**
-         * The camera contrast used on this material.
-         * This property is here and not in the camera to allow controlling contrast without full screen post process.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraContrast: number = 1.0;
-        
-        /**
-         * Color Grading 2D Lookup Texture.
-         * This allows special effects like sepia, black and white to sixties rendering style. 
-         */
-        @serializeAsTexture()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorGradingTexture: BaseTexture = null;
-        
-        /**
-         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
-         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
-         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
-         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
-         */
-        @serializeAsColorCurves()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorCurves: ColorCurves = null;
-
-        /**
          * AKA Diffuse Texture in standard nomenclature.
          */
         @serializeAsTexture()
@@ -244,14 +209,6 @@
         @expandToProperty("_markAllSubMeshesAsTexturesDirty")
         public invertRefractionY = false;
 
-        @serializeAsFresnelParameters()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public opacityFresnelParameters: FresnelParameters;
-
-        @serializeAsFresnelParameters()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public emissiveFresnelParameters: FresnelParameters;
-
         /**
          * This parameters will make the material used its opacity to control how much it is refracting aginst not.
          * Materials half opaque for instance using refraction could benefit from this control.
@@ -265,15 +222,7 @@
         public useLightmapAsShadowmap = false;
         
         /**
-         * In this mode, the emissive informtaion will always be added to the lighting once.
-         * A light for instance can be thought as emissive.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public useEmissiveAsIllumination = false;
-        
-        /**
-         * Secifies that the alpha is coming form the albedo channel alpha channel.
+         * Specifies that the alpha is coming form the albedo channel alpha channel.
          */
         @serialize()
         @expandToProperty("_markAllSubMeshesAsTexturesDirty")
@@ -338,14 +287,6 @@
         public useAutoMicroSurfaceFromReflectivityMap = false;
         
         /**
-         * Allows to work with scalar in linear mode. This is definitely a matter of preferences and tools used during
-         * the creation of the material.
-         */
-        @serialize()
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public useScalarInLinearSpace = false;
-        
-        /**
          * BJS is using an harcoded light falloff based on a manually sets up range.
          * In PBR, one way to represents the fallof is to use the inverse squared root algorythm.
          * This parameter can help you switch back to the BJS mode in order to create scenes using both materials.
@@ -419,6 +360,143 @@
         public twoSidedLighting = false;
 
         /**
+         * Specifies that the alpha is premultiplied before output (this enables alpha premultiplied blending).
+         * in your scene composition.
+         */
+        @serialize()
+        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
+        public premultiplyAlpha = false;
+
+        /**
+         * A fresnel is applied to the alpha of the model to ensure grazing angles edges are not alpha tested.
+         * And/Or occlude the blended part.
+         */
+        @serialize()
+        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
+        public useAlphaFresnel = false;
+
+        /**
+         * Gets the image processing configuration used either in this material.
+         */
+        public get imageProcessingConfiguration(): ImageProcessingConfiguration {
+            return this._imageProcessingConfiguration;
+        }
+
+        /**
+         * Sets the Default image processing configuration used either in the this material.
+         * 
+         * If sets to null, the scene one is in use.
+         */
+        public set imageProcessingConfiguration(value: ImageProcessingConfiguration) {
+            this._attachImageProcessingConfiguration(value);
+
+            // Ensure the effect will be rebuilt.
+            this._markAllSubMeshesAsTexturesDirty();
+        }
+
+        /**
+         * Gets wether the color curves effect is enabled.
+         */
+        public get cameraColorCurvesEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorCurvesEnabled;
+        }
+        /**
+         * Sets wether the color curves effect is enabled.
+         */
+        public set cameraColorCurvesEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorCurvesEnabled = value;
+        }
+
+        /**
+         * Gets wether the color grading effect is enabled.
+         */
+        public get cameraColorGradingEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorGradingEnabled;
+        }
+        /**
+         * Gets wether the color grading effect is enabled.
+         */
+        public set cameraColorGradingEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorGradingEnabled = value;
+        }
+
+        /**
+         * Gets wether tonemapping is enabled or not.
+         */
+        public get cameraToneMappingEnabled(): boolean {
+            return this._imageProcessingConfiguration.toneMappingEnabled;
+        };
+        /**
+         * Sets wether tonemapping is enabled or not
+         */
+        public set cameraToneMappingEnabled(value: boolean) {
+            this._imageProcessingConfiguration.toneMappingEnabled = value;
+        };
+
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public get cameraExposure(): number {
+            return this._imageProcessingConfiguration.exposure;
+        };
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public set cameraExposure(value: number) {
+            this._imageProcessingConfiguration.exposure = value;
+        };
+        
+        /**
+         * Gets The camera contrast used on this material.
+         */
+        public get cameraContrast(): number {
+            return this._imageProcessingConfiguration.contrast;
+        }
+
+        /**
+         * Sets The camera contrast used on this material.
+         */
+        public set cameraContrast(value: number) {
+            this._imageProcessingConfiguration.contrast = value;
+        }
+        
+        /**
+         * Gets the Color Grading 2D Lookup Texture.
+         */
+        public get cameraColorGradingTexture(): BaseTexture {
+            return this._imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets the Color Grading 2D Lookup Texture.
+         */
+        public set cameraColorGradingTexture(value: BaseTexture) {
+            this._imageProcessingConfiguration.colorGradingTexture = value;
+        }
+
+        /**
+         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
+         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
+         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
+         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
+         */
+        public get cameraColorCurves(): ColorCurves {
+            return this._imageProcessingConfiguration.colorCurves;
+        }
+        /**
+         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
+         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
+         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
+         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
+         */
+        public set cameraColorCurves(value: ColorCurves) {
+            this._imageProcessingConfiguration.colorCurves = value;
+        }
+
+        /**
          * Instantiates a new PBRMaterial instance.
          * 
          * @param name The material name
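
With these changes the camera* members of PBRMaterial become thin proxies over the attached ImageProcessingConfiguration (the scene-wide one by default), so existing tuning code keeps working. A short usage sketch, assuming an existing scene; texture paths and values are purely illustrative:

    // Illustrative usage of the new proxy properties (not part of the PR).
    const pbr = new BABYLON.PBRMaterial("pbr", scene);

    // These writes forward to the material's ImageProcessingConfiguration:
    pbr.cameraExposure = 1.2;
    pbr.cameraContrast = 1.5;
    pbr.cameraToneMappingEnabled = true;
    pbr.cameraColorGradingTexture = new BABYLON.Texture("textures/colorGrade.png", scene, true);
    pbr.cameraColorGradingEnabled = true;

    // Or give the material its own configuration, detached from the scene default:
    pbr.imageProcessingConfiguration = new BABYLON.ImageProcessingConfiguration();
    pbr.imageProcessingConfiguration.contrast = 2.0;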

+ 7 - 45
src/Materials/Textures/babylon.colorGradingTexture.ts

@@ -122,7 +122,6 @@ module BABYLON {
                         tempData[pixelStorageIndex + 0] = r;
                         tempData[pixelStorageIndex + 1] = g;
                         tempData[pixelStorageIndex + 2] = b;
-                        tempData[pixelStorageIndex + 3] = 0;
 
                         pixelIndexSlice++;
                         if (pixelIndexSlice % size == 0) {
@@ -137,8 +136,13 @@ module BABYLON {
                 }
 
                 for (let i = 0; i < tempData.length; i++) {
-                    var value = tempData[i];
-                    data[i] = (value / maxColor * 255);
+                    if (i > 0 && (i+1) % 4 === 0) {
+                        data[i] = 255;
+                    }
+                    else {
+                        var value = tempData[i];
+                        data[i] = (value / maxColor * 255);
+                    }
                 }
 
                 this.getScene().getEngine().updateTextureSize(texture, size * size, size);
@@ -186,48 +190,6 @@ module BABYLON {
             }
         }
 
-         /**
-         * Binds the color grading to the shader.
-         * @param colorGrading The texture to bind
-         * @param effect The effect to bind to
-         */
-        public static Bind(colorGrading: BaseTexture, effect: Effect) : void {
-            effect.setTexture("cameraColorGrading2DSampler", colorGrading);
-                        
-     	    let x = colorGrading.level;                 // Texture Level
-            let y = colorGrading.getSize().height;      // Texture Size example with 8
-            let z = y - 1.0;                    // SizeMinusOne 8 - 1
-            let w = 1 / y;                      // Space of 1 slice 1 / 8
-            
-            effect.setFloat4("vCameraColorGradingInfos", x, y, z, w);
-            
-            let slicePixelSizeU = w / y;    // Space of 1 pixel in U direction, e.g. 1/64
-            let slicePixelSizeV = w;		// Space of 1 pixel in V direction, e.g. 1/8					    // Space of 1 pixel in V direction, e.g. 1/8
-            
-            let x2 = z * slicePixelSizeU;   // Extent of lookup range in U for a single slice so that range corresponds to (size-1) texels, for example 7/64
-            let y2 = z / y;	                // Extent of lookup range in V for a single slice so that range corresponds to (size-1) texels, for example 7/8
-            let z2 = 0.5 * slicePixelSizeU;	// Offset of lookup range in U to align sample position with texel centre, for example 0.5/64 
-            let w2 = 0.5 * slicePixelSizeV;	// Offset of lookup range in V to align sample position with texel centre, for example 0.5/8
-            
-            effect.setFloat4("vCameraColorGradingScaleOffset", x2, y2, z2, w2);
-        }
-        
-        /**
-         * Prepare the list of uniforms associated with the ColorGrading effects.
-         * @param uniformsList The list of uniforms used in the effect
-         * @param samplersList The list of samplers used in the effect
-         */
-        public static PrepareUniformsAndSamplers(uniformsList: string[], samplersList: string[]): void {
-            uniformsList.push(
-                "vCameraColorGradingInfos", 
-                "vCameraColorGradingScaleOffset"
-            );
-
-            samplersList.push(
-                "cameraColorGrading2DSampler"
-            );
-        }
-
         /**
          * Parses a color grading texture serialized by Babylon.
          * @param parsedTexture The texture information being parsedTexture
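
The .3DL load path now writes a fully opaque alpha for every texel (it previously stored 0 in the alpha channel) while still normalizing the RGB components against the file's maximum value. The new loop in isolation, as a hypothetical helper with the same logic:

    // Hypothetical helper mirroring the normalization change above.
    function normalizeLutData(tempData: number[], maxColor: number): Uint8Array {
        const data = new Uint8Array(tempData.length);
        for (let i = 0; i < tempData.length; i++) {
            if (i > 0 && (i + 1) % 4 === 0) {
                data[i] = 255;                            // alpha component -> opaque
            } else {
                data[i] = tempData[i] / maxColor * 255;   // scale RGB into [0, 255]
            }
        }
        return data;
    }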

+ 4 - 0
src/Materials/babylon.effect.ts

@@ -197,6 +197,10 @@
             return this._isReady;
         }
 
+        public getEngine(): Engine {
+            return this._engine;
+        }
+
         public getProgram(): WebGLProgram {
             return this._program;
         }
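
The new getEngine accessor lets ImageProcessingConfiguration.bind (added below) derive the current render size from the effect itself instead of requiring a scene reference. A minimal sketch of that use:

    // Sketch: compute the inverse screen size from an effect, as bind() does for the vignette.
    function setInverseScreenSize(effect: BABYLON.Effect): void {
        const invWidth = 1 / effect.getEngine().getRenderWidth();
        const invHeight = 1 / effect.getEngine().getRenderHeight();
        effect.setFloat2("vInverseScreenSize", invWidth, invHeight);
    }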

+ 431 - 0
src/Materials/babylon.imageProcessingConfiguration.ts

@@ -0,0 +1,431 @@
+module BABYLON {
+    /**
+     * Interface to implement in your material defines to easily integrate the
+     * image processing functions.
+     */
+    export interface IImageProcessingConfigurationDefines {
+        IMAGEPROCESSING: boolean;
+        VIGNETTE: boolean;
+        VIGNETTEBLENDMODEMULTIPLY: boolean;
+        VIGNETTEBLENDMODEOPAQUE: boolean;
+        TONEMAPPING: boolean;
+        CONTRAST: boolean;
+        EXPOSURE: boolean;
+        COLORCURVES: boolean;
+        COLORGRADING: boolean;
+        SAMPLER3DGREENDEPTH: boolean;
+        SAMPLER3DBGRMAP: boolean;
+        IMAGEPROCESSINGPOSTPROCESS: boolean;
+    }
+
+    /**
+     * This groups together the common properties used for image processing either in a direct forward pass
+     * or through a post process, depending on whether the image processing pipeline is used in your scene.
+     */
+    export class ImageProcessingConfiguration {
+
+        /**
+         * Color curves setup used in the effect if colorCurvesEnabled is set to true 
+         */
+        @serializeAsColorCurves()
+        public colorCurves = new ColorCurves();
+
+        @serialize()
+        private _colorCurvesEnabled = false;
+        /**
+         * Gets whether the color curves effect is enabled.
+         */
+        public get colorCurvesEnabled(): boolean {
+            return this._colorCurvesEnabled;
+        }
+        /**
+         * Sets whether the color curves effect is enabled.
+         */
+        public set colorCurvesEnabled(value: boolean) {
+            if (this._colorCurvesEnabled === value) {
+                return;
+            }
+
+            this._colorCurvesEnabled = value;
+            this._updateParameters();
+        }
+
+        /**
+         * Color grading LUT texture used in the effect if colorGradingEnabled is set to true 
+         */
+        @serializeAsTexture()
+        public colorGradingTexture: BaseTexture;
+
+        @serialize()
+        private _colorGradingEnabled = false;
+        /**
+         * Gets whether the color grading effect is enabled.
+         */
+        public get colorGradingEnabled(): boolean {
+            return this._colorGradingEnabled;
+        }
+        /**
+         * Sets whether the color grading effect is enabled.
+         */
+        public set colorGradingEnabled(value: boolean) {
+            if (this._colorGradingEnabled === value) {
+                return;
+            }
+
+            this._colorGradingEnabled = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        private _colorGradingWithGreenDepth = false;
+        /**
+         * Gets whether the color grading effect is using a green depth for the 3D texture.
+         */
+        public get colorGradingWithGreenDepth(): boolean {
+            return this._colorGradingWithGreenDepth;
+        }
+        /**
+         * Sets whether the color grading effect is using a green depth for the 3D texture.
+         */
+        public set colorGradingWithGreenDepth(value: boolean) {
+            if (this._colorGradingWithGreenDepth === value) {
+                return;
+            }
+
+            this._colorGradingWithGreenDepth = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        private _colorGradingBGR = false;
+        /**
+         * Gets whether the color grading texture contains BGR values.
+         */
+        public get colorGradingBGR(): boolean {
+            return this._colorGradingBGR;
+        }
+        /**
+         * Sets whether the color grading texture contains BGR values.
+         */
+        public set colorGradingBGR(value: boolean) {
+            if (this._colorGradingBGR === value) {
+                return;
+            }
+
+            this._colorGradingBGR = value;
+            this._updateParameters();
+        }
+
+        /**
+         * Exposure used in the effect.
+         */
+        @serialize()
+        public exposure = 1.0;
+
+        @serialize()
+        private _toneMappingEnabled = false;
+        /**
+         * Gets whether the tone mapping effect is enabled.
+         */
+        public get toneMappingEnabled(): boolean {
+            return this._toneMappingEnabled;
+        }
+        /**
+         * Sets whether the tone mapping effect is enabled.
+         */
+        public set toneMappingEnabled(value: boolean) {
+            if (this._toneMappingEnabled === value) {
+                return;
+            }
+
+            this._toneMappingEnabled = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        protected _contrast = 1.0;
+        /**
+         * Gets the contrast used in the effect.
+         */
+        public get contrast(): number {
+            return this._contrast;
+        }
+        /**
+         * Sets the contrast used in the effect.
+         */
+        public set contrast(value: number) {
+            if (this._contrast === value) {
+                return;
+            }
+
+            this._contrast = value;
+            this._updateParameters();
+        }
+
+        /**
+         * Vignette stretch size.
+         */
+        @serialize()
+        public vignetteStretch = 0;
+
+        /**
+         * Vignette centre X Offset.
+         */
+        @serialize()
+        public vignetteCentreX = 0;
+
+        /**
+         * Vignette centre Y Offset.
+         */
+        @serialize()
+        public vignetteCentreY = 0;
+
+        /**
+         * Vignette weight or intensity of the vignette effect.
+         */
+        @serialize()
+        public vignetteWeight = 1.5;
+
+        /**
+         * Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
+         * if vignetteEnabled is set to true.
+         */
+        @serializeAsColor4()
+        public vignetteColor: BABYLON.Color4 = new BABYLON.Color4(0, 0, 0, 0);
+
+        /**
+         * Camera field of view used by the Vignette effect.
+         */
+        @serialize()
+        public vignetteCameraFov = 0.5;
+
+        @serialize()
+        private _vignetteBlendMode = ImageProcessingConfiguration.VIGNETTEMODE_MULTIPLY;
+        /**
+         * Gets the vignette blend mode allowing different kind of effect.
+         */
+        public get vignetteBlendMode(): number {
+            return this._vignetteBlendMode;
+        }
+        /**
+         * Sets the vignette blend mode allowing different kind of effect.
+         */
+        public set vignetteBlendMode(value: number) {
+            if (this._vignetteBlendMode === value) {
+                return;
+            }
+
+            this._vignetteBlendMode = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        private _vignetteEnabled = false;
+        /**
+         * Gets whether the vignette effect is enabled.
+         */
+        public get vignetteEnabled(): boolean {
+            return this._vignetteEnabled;
+        }
+        /**
+         * Sets whether the vignette effect is enabled.
+         */
+        public set vignetteEnabled(value: boolean) {
+            if (this._vignetteEnabled === value) {
+                return;
+            }
+
+            this._vignetteEnabled = value;
+            this._updateParameters();
+        }
+
+        @serialize()
+        private _applyByPostProcess = false;
+        /**
+         * Gets whether the image processing is applied through a post process or not.
+         */
+        public get applyByPostProcess(): boolean {
+            return this._applyByPostProcess;
+        }
+        /**
+         * Sets whether the image processing is applied through a post process or not.
+         */
+        public set applyByPostProcess(value: boolean) {
+            if (this._applyByPostProcess === value) {
+                return;
+            }
+
+            this._applyByPostProcess = value;
+            this._updateParameters();
+        }
+
+        /**
+        * An event triggered when the configuration changes and the shader needs to update some parameters.
+        * @type {BABYLON.Observable}
+        */
+        public onUpdateParameters = new Observable<ImageProcessingConfiguration>();
+
+        /**
+         * Method called each time the image processing information changes and requires the effect to be recompiled.
+         */
+        protected _updateParameters(): void {
+            this.onUpdateParameters.notifyObservers(this);
+        }
+
+        /**
+         * Prepare the list of uniforms associated with the Image Processing effects.
+         * @param uniforms The list of uniforms used in the effect
+         * @param defines the list of defines currently in use
+         */
+        public static PrepareUniforms(uniforms: string[], defines: IImageProcessingConfigurationDefines): void {
+            if (defines.EXPOSURE) {
+                uniforms.push("exposureLinear");
+            }
+            if (defines.CONTRAST) {
+                uniforms.push("contrast");
+            }
+            if (defines.COLORGRADING) {
+                uniforms.push("colorTransformSettings");
+            }
+            if (defines.VIGNETTE) {
+                uniforms.push("vInverseScreenSize");
+                uniforms.push("vignetteSettings1");
+                uniforms.push("vignetteSettings2");
+            }
+            if (defines.COLORCURVES) {
+                ColorCurves.PrepareUniforms(uniforms);
+            }
+        }
+
+        /**
+         * Prepare the list of samplers associated with the Image Processing effects.
+         * @param samplersList The list of samplers used in the effect
+         * @param defines the list of defines currently in use
+         */
+        public static PrepareSamplers(samplersList: string[], defines: IImageProcessingConfigurationDefines): void {
+            if (defines.COLORGRADING) {
+                samplersList.push("txColorTransform");
+            }
+        }
+
+        /**
+         * Prepare the list of defines associated to the shader.
+         * @param defines the list of defines to complete
+         */
+        public prepareDefines(defines: IImageProcessingConfigurationDefines): void {
+            defines.VIGNETTE = this.vignetteEnabled;
+            defines.VIGNETTEBLENDMODEMULTIPLY = (this.vignetteBlendMode === ImageProcessingConfiguration._VIGNETTEMODE_MULTIPLY);
+            defines.VIGNETTEBLENDMODEOPAQUE = !defines.VIGNETTEBLENDMODEMULTIPLY;
+            defines.TONEMAPPING = this.toneMappingEnabled;
+            defines.CONTRAST = (this.contrast !== 1.0);
+            defines.EXPOSURE = (this.exposure !== 1.0);
+            defines.COLORCURVES = (this.colorCurvesEnabled && !!this.colorCurves);
+            defines.COLORGRADING = (this.colorGradingEnabled && !!this.colorGradingTexture);
+            defines.SAMPLER3DGREENDEPTH = this.colorGradingWithGreenDepth;
+            defines.SAMPLER3DBGRMAP = this.colorGradingBGR;
+            defines.IMAGEPROCESSINGPOSTPROCESS = this.applyByPostProcess;
+            defines.IMAGEPROCESSING = defines.VIGNETTE || defines.TONEMAPPING || defines.CONTRAST || defines.EXPOSURE || defines.COLORCURVES || defines.COLORGRADING;
+        }
+
+        /**
+         * Returns true if all of the image processing information is ready.
+         */
+        public isReady() {
+            // The color grading texture cannot be non-blocking.
+            return !this.colorGradingEnabled || !this.colorGradingTexture || this.colorGradingTexture.isReady();
+        }
+
+        /**
+         * Binds the image processing to the shader.
+         * @param effect The effect to bind to
+         */
+        public bind(effect: Effect, aspectRatio = 1) : void {
+            // Color Curves
+            if (this._colorCurvesEnabled) {
+                ColorCurves.Bind(this.colorCurves, effect);
+            }
+
+            // Vignette
+            if (this._vignetteEnabled) {
+                var inverseWidth = 1 / effect.getEngine().getRenderWidth();
+                var inverseHeight = 1 / effect.getEngine().getRenderHeight();
+                effect.setFloat2("vInverseScreenSize", inverseWidth, inverseHeight);
+
+                let vignetteScaleY = Math.tan(this.vignetteCameraFov * 0.5);
+                let vignetteScaleX = vignetteScaleY * aspectRatio;
+
+                let vignetteScaleGeometricMean = Math.sqrt(vignetteScaleX * vignetteScaleY);
+                vignetteScaleX = Tools.Mix(vignetteScaleX, vignetteScaleGeometricMean, this.vignetteStretch);
+                vignetteScaleY = Tools.Mix(vignetteScaleY, vignetteScaleGeometricMean, this.vignetteStretch);
+
+                effect.setFloat4("vignetteSettings1", vignetteScaleX, vignetteScaleY, -vignetteScaleX * this.vignetteCentreX, -vignetteScaleY * this.vignetteCentreY);
+
+                let vignettePower = -2.0 * this.vignetteWeight;
+                effect.setFloat4("vignetteSettings2", this.vignetteColor.r, this.vignetteColor.g, this.vignetteColor.b, vignettePower);
+            }
+
+            // Exposure
+            effect.setFloat("exposureLinear", this.exposure);
+            
+            // Contrast
+            effect.setFloat("contrast", this.contrast);
+            
+            // Color transform settings
+            if (this.colorGradingTexture) {
+                effect.setTexture("txColorTransform", this.colorGradingTexture);
+                let textureSize = this.colorGradingTexture.getSize().height;
+
+                effect.setFloat4("colorTransformSettings",
+                    (textureSize - 1) / textureSize, // textureScale
+                    0.5 / textureSize, // textureOffset
+                    textureSize, // textureSize
+                    this.colorGradingTexture.level // weight
+                );
+            }
+        }
+
+        /**
+         * Clones the current image processing instance.
+         * @return The cloned image processing
+         */
+        public clone(): ImageProcessingConfiguration {
+            return SerializationHelper.Clone(() => new ImageProcessingConfiguration(), this);
+        }
+
+        /**
+         * Serializes the current image processing instance to a json representation.
+         * @return a JSON representation
+         */
+        public serialize(): any {
+            return SerializationHelper.Serialize(this);
+        }
+
+        /**
+         * Parses the image processing from a json representation.
+         * @param source the JSON source to parse
+         * @return The parsed image processing
+         */      
+        public static Parse(source: any) : ImageProcessingConfiguration {
+            return SerializationHelper.Parse(() => new ImageProcessingConfiguration(), source, null, null);
+        }
+
+        // Static constants associated to the image processing.
+        private static _VIGNETTEMODE_MULTIPLY = 0;
+        private static _VIGNETTEMODE_OPAQUE = 1;
+
+        /**
+         * Used to apply the vignette as a mix with the pixel color.
+         */
+        public static get VIGNETTEMODE_MULTIPLY(): number {
+            return this._VIGNETTEMODE_MULTIPLY;
+        }
+
+        /**
+         * Used to apply the vignette as a replacement of the pixel color.
+         */
+        public static get VIGNETTEMODE_OPAQUE(): number {
+            return this._VIGNETTEMODE_OPAQUE;
+        }
+    }
+} 
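
As a usage illustration (not part of the diff), the configuration exposed on the scene can be tuned once and is then shared by every material and post process that does not override it. The LUT path below is only an example:

    // Sketch: tune the scene-wide image processing configuration (scene is an existing BABYLON.Scene).
    const ipc = scene.imageProcessingConfiguration;
    ipc.exposure = 1.5;                 // EXPOSURE define is set because the value !== 1.0
    ipc.contrast = 1.2;                 // CONTRAST define is set because the value !== 1.0
    ipc.vignetteEnabled = true;         // notifies observers so dependent effects rebuild
    ipc.vignetteWeight = 2.0;
    ipc.colorGradingTexture = new BABYLON.ColorGradingTexture("textures/lut.3dl", scene);
    ipc.colorGradingEnabled = true;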

+ 5 - 4
src/Materials/babylon.materialHelper.ts

@@ -327,17 +327,18 @@
             light.transferToEffect(effect, lightIndex + "");
         }
 
-        public static BindLights(scene: Scene, mesh: AbstractMesh, effect: Effect, defines: MaterialDefines, maxSimultaneousLights = 4) {
+        public static BindLights(scene: Scene, mesh: AbstractMesh, effect: Effect, defines: MaterialDefines, maxSimultaneousLights = 4, usePhysicalLightFalloff = false) {
             var lightIndex = 0;
             for (var light of mesh._lightSources) {
+                let scaledIntensity = light.getScaledIntensity();
                 light._uniformBuffer.bindToEffect(effect, "Light" + lightIndex);
 
                 MaterialHelper.BindLightProperties(light, effect, lightIndex);
 
-                light.diffuse.scaleToRef(light.intensity, Tmp.Color3[0]);
-                light._uniformBuffer.updateColor4("vLightDiffuse", Tmp.Color3[0], light.range, lightIndex + "");
+                light.diffuse.scaleToRef(scaledIntensity, Tmp.Color3[0]);
+                light._uniformBuffer.updateColor4("vLightDiffuse", Tmp.Color3[0], usePhysicalLightFalloff ? light.radius : light.range, lightIndex + "");
                 if (defines["SPECULARTERM"]) {
-                    light.specular.scaleToRef(light.intensity, Tmp.Color3[1]);
+                    light.specular.scaleToRef(scaledIntensity, Tmp.Color3[1]);
                     light._uniformBuffer.updateColor3("vLightSpecular", Tmp.Color3[1], lightIndex + "");
                 }
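
The extra parameter lets physically based materials forward the light radius (rather than its range) in the fourth component of vLightDiffuse and use the intensity-mode-scaled value returned by getScaledIntensity. A caller would look like this (sketch; scene, mesh, effect and defines are assumed to be in scope):

    // Sketch: bind lights with physical falloff enabled, as the PBR material is expected to do.
    BABYLON.MaterialHelper.BindLights(scene, mesh, effect, defines, 4, /* usePhysicalLightFalloff */ true);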
 

+ 167 - 59
src/Materials/babylon.standardMaterial.ts

@@ -1,5 +1,5 @@
 module BABYLON {
-   export class StandardMaterialDefines extends MaterialDefines {
+   export class StandardMaterialDefines extends MaterialDefines implements IImageProcessingConfigurationDefines {
         public DIFFUSE = false;
         public AMBIENT = false;
         public OPACITY = false;
@@ -57,14 +57,25 @@ module BABYLON {
         public INVERTNORMALMAPY = false;
         public TWOSIDEDLIGHTING = false;
         public SHADOWFLOAT = false;
-        public CAMERACOLORGRADING = false;
-        public CAMERACOLORCURVES = false;
         public MORPHTARGETS = false;
         public MORPHTARGETS_NORMAL = false;
         public MORPHTARGETS_TANGENT = false;
         public NUM_MORPH_INFLUENCERS = 0;
         public USERIGHTHANDEDSYSTEM = false;
 
+        public IMAGEPROCESSING = false;
+        public VIGNETTE = false;
+        public VIGNETTEBLENDMODEMULTIPLY = false;
+        public VIGNETTEBLENDMODEOPAQUE = false;
+        public TONEMAPPING = false;
+        public CONTRAST = false;
+        public COLORCURVES = false;
+        public COLORGRADING = false;
+        public SAMPLER3DGREENDEPTH = false;
+        public SAMPLER3DBGRMAP = false;
+        public IMAGEPROCESSINGPOSTPROCESS = false;
+        public EXPOSURE = false;
+
         constructor() {
             super();
             this.rebuild();
@@ -271,24 +282,144 @@ module BABYLON {
         public twoSidedLighting: boolean;     
 
         /**
-         * Color Grading 2D Lookup Texture.
-         * This allows special effects like sepia, black and white to sixties rendering style. 
+         * Default configuration related to image processing available in the standard Material.
          */
-        @serializeAsTexture("cameraColorGradingTexture")
-        private _cameraColorGradingTexture: BaseTexture;
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorGradingTexture: BaseTexture;             
+        protected _imageProcessingConfiguration: ImageProcessingConfiguration;
 
         /**
-         * The color grading curves provide additional color adjustmnent that is applied after any color grading transform (3D LUT). 
-         * They allow basic adjustment of saturation and small exposure adjustments, along with color filter tinting to provide white balance adjustment or more stylistic effects.
-         * These are similar to controls found in many professional imaging or colorist software. The global controls are applied to the entire image. For advanced tuning, extra controls are provided to adjust the shadow, midtone and highlight areas of the image; 
-         * corresponding to low luminance, medium luminance, and high luminance areas respectively.
+         * Gets the image processing configuration used in this material.
          */
-        @serializeAsColorCurves("cameraColorCurves")
-        private _cameraColorCurves: ColorCurves = null;
-        @expandToProperty("_markAllSubMeshesAsTexturesDirty")
-        public cameraColorCurves: ColorCurves;             
+        public get imageProcessingConfiguration(): ImageProcessingConfiguration {
+            return this._imageProcessingConfiguration;
+        }
+
+        /**
+         * Sets the default image processing configuration used in this material.
+         *
+         * If set to null, the scene's configuration is used.
+         */
+        public set imageProcessingConfiguration(value: ImageProcessingConfiguration) {
+            this._attachImageProcessingConfiguration(value);
+
+            // Ensure the effect will be rebuilt.
+            this._markAllSubMeshesAsTexturesDirty();
+        }
+
+        /**
+         * Keep track of the image processing observer to allow dispose and replace.
+         */
+        private _imageProcessingObserver: Observer<ImageProcessingConfiguration>;
+
+        /**
+         * Attaches a new image processing configuration to the Standard Material.
+         * @param configuration The configuration to attach, or null to fall back to the scene's configuration.
+         */
+        protected _attachImageProcessingConfiguration(configuration: ImageProcessingConfiguration): void {
+            if (configuration === this._imageProcessingConfiguration) {
+                return;
+            }
+
+            // Detaches observer.
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
+            // Pick the scene configuration if needed.
+            if (!configuration) {
+                this._imageProcessingConfiguration = this.getScene().imageProcessingConfiguration;
+            }
+            else {
+                this._imageProcessingConfiguration = configuration;
+            }
+
+            // Attaches observer.
+            this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(conf => {
+                this._markAllSubMeshesAsTexturesDirty();
+            });
+        }
+
+        /**
+         * Gets whether the color curves effect is enabled.
+         */
+        public get cameraColorCurvesEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorCurvesEnabled;
+        }
+        /**
+         * Sets whether the color curves effect is enabled.
+         */
+        public set cameraColorCurvesEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorCurvesEnabled = value;
+        }
+
+        /**
+         * Gets whether the color grading effect is enabled.
+         */
+        public get cameraColorGradingEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorGradingEnabled;
+        }
+        /**
+         * Sets whether the color grading effect is enabled.
+         */
+        public set cameraColorGradingEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorGradingEnabled = value;
+        }
+
+        /**
+         * Gets whether tonemapping is enabled or not.
+         */
+        public get cameraToneMappingEnabled(): boolean {
+            return this._imageProcessingConfiguration.toneMappingEnabled;
+        };
+        /**
+         * Sets whether tonemapping is enabled or not.
+         */
+        public set cameraToneMappingEnabled(value: boolean) {
+            this._imageProcessingConfiguration.toneMappingEnabled = value;
+        };
+
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public get cameraExposure(): number {
+            return this._imageProcessingConfiguration.exposure;
+        };
+        /**
+         * The camera exposure used on this material.
+         * This property is here and not in the camera to allow controlling exposure without full screen post process.
+         * This corresponds to a photographic exposure.
+         */
+        public set cameraExposure(value: number) {
+            this._imageProcessingConfiguration.exposure = value;
+        };
+        
+        /**
+         * Gets the camera contrast used on this material.
+         */
+        public get cameraContrast(): number {
+            return this._imageProcessingConfiguration.contrast;
+        }
+
+        /**
+         * Sets the camera contrast used on this material.
+         */
+        public set cameraContrast(value: number) {
+            this._imageProcessingConfiguration.contrast = value;
+        }
+        
+        /**
+         * Gets the Color Grading 2D Lookup Texture.
+         */
+        public get cameraColorGradingTexture(): BaseTexture {
+            return this._imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets the Color Grading 2D Lookup Texture.
+         */
+        public set cameraColorGradingTexture(value: BaseTexture) {
+            this._imageProcessingConfiguration.colorGradingTexture = value;
+        }
 
         public customShaderNameResolve: (shaderName: string, uniforms: string[], uniformBuffers: string[], samplers: string[], defines: StandardMaterialDefines) => string;
 
@@ -301,6 +432,9 @@ module BABYLON {
         constructor(name: string, scene: Scene) {
             super(name, scene);
 
+            // Setup the default processing configuration to the scene.
+            this._attachImageProcessingConfiguration(null);
+
             this.getRenderTargetTextures = (): SmartArray<RenderTargetTexture> => {
                 this._renderTargets.reset();
 
@@ -525,17 +659,6 @@ module BABYLON {
                         defines.REFRACTION = false;
                     }
 
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        // Camera Color Grading can not be none blocking.
-                        if (!this._cameraColorGradingTexture.isReady()) {
-                            return false;
-                        } else {
-                            defines.CAMERACOLORGRADING = true;
-                        }
-                    } else {
-                        defines.CAMERACOLORGRADING = false;
-                    }
-
                     defines.TWOSIDEDLIGHTING = !this._backFaceCulling && this._twoSidedLighting;
                 } else {
                     defines.DIFFUSE = false;
@@ -546,10 +669,13 @@ module BABYLON {
                     defines.LIGHTMAP = false;
                     defines.BUMP = false;
                     defines.REFRACTION = false;
-                    defines.CAMERACOLORGRADING = false;
                 }
 
-                defines.CAMERACOLORCURVES = (this._cameraColorCurves !== undefined && this._cameraColorCurves !== null);
+                if (!this.imageProcessingConfiguration.isReady()) {
+                    return false;
+                }
+
+                this.imageProcessingConfiguration.prepareDefines(defines);
 
                 defines.ALPHAFROMDIFFUSE = this._shouldUseAlphaFromDiffuseTexture();
 
@@ -558,7 +684,7 @@ module BABYLON {
                 defines.LINKEMISSIVEWITHDIFFUSE = this._linkEmissiveWithDiffuse;       
 
                 defines.SPECULAROVERALPHA = this._useSpecularOverAlpha;
-            } 
+            }
 
             if (defines._areFresnelDirty) {
                 if (StandardMaterial.FresnelEnabled) {
@@ -709,12 +835,9 @@ module BABYLON {
 
                 var uniformBuffers = ["Material", "Scene"];
 
-                if (defines.CAMERACOLORCURVES) {
-                    ColorCurves.PrepareUniforms(uniforms);
-                }
-                if (defines.CAMERACOLORGRADING) {
-                    ColorGradingTexture.PrepareUniformsAndSamplers(uniforms, samplers);
-                }
+                ImageProcessingConfiguration.PrepareUniforms(uniforms, defines);
+                ImageProcessingConfiguration.PrepareSamplers(samplers, defines);
+
                 MaterialHelper.PrepareUniformsAndSamplersList(<EffectCreationOptions>{
                     uniformsNames: uniforms, 
                     uniformBuffersNames: uniformBuffers,
@@ -970,10 +1093,6 @@ module BABYLON {
                             effect.setTexture("refraction2DSampler", this._refractionTexture);
                         }
                     }
-                    
-                    if (this._cameraColorGradingTexture && StandardMaterial.ColorGradingTextureEnabled) {
-                        ColorGradingTexture.Bind(this._cameraColorGradingTexture, effect);
-                    }
                 }
 
                 // Clip plane
@@ -1002,17 +1121,14 @@ module BABYLON {
 
                 // Morph targets
                 if (defines.NUM_MORPH_INFLUENCERS) {
-                    MaterialHelper.BindMorphTargetParameters(mesh, effect);                
+                    MaterialHelper.BindMorphTargetParameters(mesh, effect);
                 }
 
                 // Log. depth
                 MaterialHelper.BindLogDepth(defines, effect, scene);
 
-                // Color Curves
-                if (this._cameraColorCurves) {
-                    ColorCurves.Bind(this._cameraColorCurves, effect);
-                }
-
+                // image processing
+                this._imageProcessingConfiguration.bind(this._activeEffect);
             }
 
             this._uniformBuffer.update();
@@ -1057,10 +1173,6 @@ module BABYLON {
             if (this._refractionTexture && this._refractionTexture.animations && this._refractionTexture.animations.length > 0) {
                 results.push(this._refractionTexture);
             }
-            
-            if (this._cameraColorGradingTexture && this._cameraColorGradingTexture.animations && this._cameraColorGradingTexture.animations.length > 0) {
-                results.push(this._cameraColorGradingTexture);
-            }
 
             return results;
         }
@@ -1104,10 +1216,6 @@ module BABYLON {
                 activeTextures.push(this._refractionTexture);
             }
 
-            if (this._cameraColorGradingTexture) {
-                activeTextures.push(this._cameraColorGradingTexture);
-            }
-
             return activeTextures;
         }
 
@@ -1148,10 +1256,10 @@ module BABYLON {
                 if (this._refractionTexture) {
                     this._refractionTexture.dispose();
                 }
-                
-                if (this._cameraColorGradingTexture) {
-                    this._cameraColorGradingTexture.dispose();
-                }
+            }
+
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
             }
 
             super.dispose(forceDisposeEffect, forceDisposeTextures);
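
For illustration (not part of the diff), the legacy camera* properties now proxy the attached configuration, which by default is the scene-wide one, so setting them also affects every other material sharing that configuration:

    // Sketch: the camera* accessors forward to the material's ImageProcessingConfiguration.
    const mat = new BABYLON.StandardMaterial("mat", scene);  // assumes an existing scene
    mat.cameraContrast = 1.4;             // same as mat.imageProcessingConfiguration.contrast = 1.4
    mat.cameraToneMappingEnabled = true;  // triggers the onUpdateParameters observer and an effect rebuild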

+ 284 - 145
src/PostProcess/babylon.imageProcessingPostProcess.ts

@@ -1,82 +1,273 @@
 module BABYLON {
     export class ImageProcessingPostProcess extends PostProcess {
-		private _colorGradingTexture: BaseTexture;
-		public colorGradingWeight: number = 1.0;
-		public colorCurves = new ColorCurves();
-        private _colorCurvesEnabled = true;
 
-        public cameraFov = 0.5;
+        /**
+         * Default configuration related to image processing available in this post process.
+         */
+        protected _imageProcessingConfiguration: ImageProcessingConfiguration;
 
-		public vignetteStretch = 0;
-		public vignetteCentreX = 0;
-		public vignetteCentreY = 0;
-		public vignetteWeight = 1.5;
-		public vignetteColor: BABYLON.Color4 = new BABYLON.Color4(0, 0, 0, 0);
-		private _vignetteBlendMode = ImageProcessingPostProcess.VIGNETTEMODE_MULTIPLY;
-        private _vignetteEnabled = true;
-
-		public cameraContrast = 1.0;
-		public cameraExposure = 1.68;
-		private _cameraToneMappingEnabled = true;
-
-        private _fromLinearSpace = false;
+        /**
+         * Gets the image processing configuration used in this post process.
+         */
+        public get imageProcessingConfiguration(): ImageProcessingConfiguration {
+            return this._imageProcessingConfiguration;
+        }
 
-        public get colorGradingTexture(): BaseTexture {
-            return this._colorGradingTexture;
+        /**
+         * Sets the default image processing configuration used in this post process.
+         *
+         * If set to null, the scene's configuration is used.
+         */
+        public set imageProcessingConfiguration(value: ImageProcessingConfiguration) {
+            this._attachImageProcessingConfiguration(value);
         }
 
-        public set colorGradingTexture(value: BaseTexture) {
-            if (this._colorGradingTexture === value) {
+        /**
+         * Keep track of the image processing observer to allow dispose and replace.
+         */
+        private _imageProcessingObserver: Observer<ImageProcessingConfiguration>;
+
+        /**
+         * Attaches a new image processing configuration to this post process.
+         * @param configuration The configuration to attach, or null to fall back to the scene's configuration.
+         */
+        protected _attachImageProcessingConfiguration(configuration: ImageProcessingConfiguration): void {
+            if (configuration === this._imageProcessingConfiguration) {
                 return;
             }
 
-            this._colorGradingTexture = value;
+            // Detaches observer.
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+
+            // Pick the scene configuration if needed.
+            if (!configuration) {
+                var camera = this.getCamera();
+                var scene = camera ? camera.getScene() : BABYLON.Engine.LastCreatedScene;
+                this._imageProcessingConfiguration = scene.imageProcessingConfiguration;
+            }
+            else {
+                this._imageProcessingConfiguration = configuration;
+            }
+
+            // Attaches observer.
+            this._imageProcessingObserver = this._imageProcessingConfiguration.onUpdateParameters.add(conf => {
+                this._updateParameters();
+            });
+
+            // Ensure the effect will be rebuilt.
             this._updateParameters();
         }
 
-        public get vignetteBlendMode(): number {
-            return this._vignetteBlendMode;
+        /**
+         * Gets the color curves setup used in the effect if colorCurvesEnabled is set to true.
+         */
+        public get colorCurves(): ColorCurves {
+            return this.imageProcessingConfiguration.colorCurves;
+        }
+        /**
+         * Sets the color curves setup used in the effect if colorCurvesEnabled is set to true.
+         */
+        public set colorCurves(value: ColorCurves) {
+            this.imageProcessingConfiguration.colorCurves = value;
         }
 
-        public set vignetteBlendMode(value: number) {
-            if (this._vignetteBlendMode === value) {
-                return;
-            }
+        /**
+         * Gets whether the color curves effect is enabled.
+         */
+        public get colorCurvesEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorCurvesEnabled;
+        }
+        /**
+         * Sets whether the color curves effect is enabled.
+         */
+        public set colorCurvesEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorCurvesEnabled = value;
+        }
 
-            this._vignetteBlendMode = value;
-            this._updateParameters();
-        }  
+        /**
+         * Gets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public get colorGradingTexture(): BaseTexture {
+            return this.imageProcessingConfiguration.colorGradingTexture;
+        }
+        /**
+         * Sets Color grading LUT texture used in the effect if colorGradingEnabled is set to true.
+         */
+        public set colorGradingTexture(value: BaseTexture) {
+            this.imageProcessingConfiguration.colorGradingTexture = value;
+        }
 
-        public get colorCurvesEnabled(): boolean {
-            return this._colorCurvesEnabled;
+        /**
+         * Gets whether the color grading effect is enabled.
+         */
+        public get colorGradingEnabled(): boolean {
+            return this.imageProcessingConfiguration.colorGradingEnabled;
+        }
+        /**
+         * Sets whether the color grading effect is enabled.
+         */
+        public set colorGradingEnabled(value: boolean) {
+            this.imageProcessingConfiguration.colorGradingEnabled = value;
         }
 
-        public set colorCurvesEnabled(value: boolean) {
-            if (this._colorCurvesEnabled === value) {
-                return;
-            }
+        /**
+         * Gets exposure used in the effect.
+         */
+        public get exposure(): number {
+            return this.imageProcessingConfiguration.exposure;
+        }
+        /**
+         * Sets exposure used in the effect.
+         */
+        public set exposure(value: number) {
+            this.imageProcessingConfiguration.exposure = value;
+        }
 
-            this._colorCurvesEnabled = value;
-            this._updateParameters();
-        }           
+        /**
+         * Gets whether tonemapping is enabled or not.
+         */
+        public get toneMappingEnabled(): boolean {
+            return this._imageProcessingConfiguration.toneMappingEnabled;
+        };
+        /**
+         * Sets whether tonemapping is enabled or not.
+         */
+        public set toneMappingEnabled(value: boolean) {
+            this._imageProcessingConfiguration.toneMappingEnabled = value;
+        };
+
+        /**
+         * Gets contrast used in the effect.
+         */
+        public get contrast(): number {
+            return this.imageProcessingConfiguration.contrast;
+        }
+        /**
+         * Sets contrast used in the effect.
+         */
+        public set contrast(value: number) {
+            this.imageProcessingConfiguration.contrast = value;
+        }
 
-        public get vignetteEnabled(): boolean {
-            return this._vignetteEnabled;
+        /**
+         * Gets Vignette stretch size.
+         */
+        public get vignetteStretch(): number {
+            return this.imageProcessingConfiguration.vignetteStretch;
+        }
+        /**
+         * Sets Vignette stretch size.
+         */
+        public set vignetteStretch(value: number) {
+            this.imageProcessingConfiguration.vignetteStretch = value;
         }
 
-        public set vignetteEnabled(value: boolean) {
-            if (this._vignetteEnabled === value) {
-                return;
-            }
+        /**
+         * Gets Vignette centre X Offset.
+         */
+        public get vignetteCentreX(): number {
+            return this.imageProcessingConfiguration.vignetteCentreX;
+        }
+        /**
+         * Sets Vignette centre X Offset.
+         */
+        public set vignetteCentreX(value: number) {
+            this.imageProcessingConfiguration.vignetteCentreX = value;
+        }
 
-            this._vignetteEnabled = value;
-            this._updateParameters();
-        }      
+        /**
+         * Gets Vignette centre Y Offset.
+         */
+        public get vignetteCentreY(): number {
+            return this.imageProcessingConfiguration.vignetteCentreY;
+        }
+        /**
+         * Sets Vignette centre Y Offset.
+         */
+        public set vignetteCentreY(value: number) {
+            this.imageProcessingConfiguration.vignetteCentreY = value;
+        }
+
+        /**
+         * Gets Vignette weight or intensity of the vignette effect.
+         */
+        public get vignetteWeight(): number {
+            return this.imageProcessingConfiguration.vignetteWeight;
+        }
+        /**
+         * Sets Vignette weight or intensity of the vignette effect.
+         */
+        public set vignetteWeight(value: number) {
+            this.imageProcessingConfiguration.vignetteWeight = value;
+        }
+
+        /**
+         * Gets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
+         * if vignetteEnabled is set to true.
+         */
+        public get vignetteColor(): Color4 {
+            return this.imageProcessingConfiguration.vignetteColor;
+        }
+        /**
+         * Sets Color of the vignette applied on the screen through the chosen blend mode (vignetteBlendMode)
+         * if vignetteEnabled is set to true.
+         */
+        public set vignetteColor(value: Color4) {
+            this.imageProcessingConfiguration.vignetteColor = value;
+        }
+
+        /**
+         * Gets Camera field of view used by the Vignette effect.
+         */
+        public get vignetteCameraFov(): number {
+            return this.imageProcessingConfiguration.vignetteCameraFov;
+        }
+        /**
+         * Sets Camera field of view used by the Vignette effect.
+         */
+        public set vignetteCameraFov(value: number) {
+            this.imageProcessingConfiguration.vignetteCameraFov = value;
+        }
+
+        /**
+         * Gets the vignette blend mode allowing different kind of effect.
+         */
+        public get vignetteBlendMode(): number {
+            return this.imageProcessingConfiguration.vignetteBlendMode;
+        }
+        /**
+         * Sets the vignette blend mode allowing different kind of effect.
+         */
+        public set vignetteBlendMode(value: number) {
+            this.imageProcessingConfiguration.vignetteBlendMode = value;
+        }
 
+        /**
+         * Gets whether the vignette effect is enabled.
+         */
+        public get vignetteEnabled(): boolean {
+            return this.imageProcessingConfiguration.vignetteEnabled;
+        }
+        /**
+         * Sets whether the vignette effect is enabled.
+         */
+        public set vignetteEnabled(value: boolean) {
+            this.imageProcessingConfiguration.vignetteEnabled = value;
+        }
+
+        @serialize()
+        private _fromLinearSpace = true;
+        /**
+         * Gets whether the input of the processing is in Gamma or Linear Space.
+         */
         public get fromLinearSpace(): boolean {
             return this._fromLinearSpace;
         }
-
+        /**
+         * Sets whether the input of the processing is in Gamma or Linear Space.
+         */
         public set fromLinearSpace(value: boolean) {
             if (this._fromLinearSpace === value) {
                 return;
@@ -84,122 +275,70 @@
 
             this._fromLinearSpace = value;
             this._updateParameters();
-        }              
-
-        public get cameraToneMappingEnabled(): boolean {
-            return this._cameraToneMappingEnabled;
         }
 
-        public set cameraToneMappingEnabled(value: boolean) {
-            if (this._cameraToneMappingEnabled === value) {
-                return;
-            }
-
-            this._cameraToneMappingEnabled = value;
-            this._updateParameters();
-        }               
+        /**
+         * Cached defines object, reused between updates to avoid garbage collection pressure.
+         */
+        private _defines: IImageProcessingConfigurationDefines & { FROMLINEARSPACE: boolean } = {
+            IMAGEPROCESSING: false,
+            VIGNETTE: false,
+            VIGNETTEBLENDMODEMULTIPLY: false,
+            VIGNETTEBLENDMODEOPAQUE: false,
+            TONEMAPPING: false,
+            CONTRAST: false,
+            COLORCURVES: false,
+            COLORGRADING: false,
+            FROMLINEARSPACE: false,
+            SAMPLER3DGREENDEPTH: false,
+            SAMPLER3DBGRMAP: false,
+            IMAGEPROCESSINGPOSTPROCESS: false,
+            EXPOSURE: false,
+        }
 
         constructor(name: string, options: number | PostProcessOptions, camera?: Camera, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Engine.TEXTURETYPE_UNSIGNED_INT) {
-            super(name, "imageProcessing", [
-                                            'contrast',
-                                            'vignetteSettings1',
-                                            'vignetteSettings2',
-                                            'cameraExposureLinear',
-                                            'vCameraColorCurveNegative',
-                                            'vCameraColorCurveNeutral',
-                                            'vCameraColorCurvePositive',
-                                            'colorTransformSettings'                    
-                                            ], ["txColorTransform"], options, camera, samplingMode, engine, reusable,
+            super(name, "imageProcessing", [], [], options, camera, samplingMode, engine, reusable,
                                             null, textureType, "postprocess", null, true);
 
-            this._updateParameters();
-
-            this.onApply = (effect: Effect) => {
-                let aspectRatio = this.aspectRatio;
-                
-                // Color 
-                if (this._colorCurvesEnabled) {
-                    ColorCurves.Bind(this.colorCurves, effect);
-                }
-
-                if (this._vignetteEnabled) {
-                    // Vignette
-                    let vignetteScaleY = Math.tan(this.cameraFov * 0.5);
-                    let vignetteScaleX = vignetteScaleY * aspectRatio;
-
-                    let vignetteScaleGeometricMean = Math.sqrt(vignetteScaleX * vignetteScaleY);
-                    vignetteScaleX = Tools.Mix(vignetteScaleX, vignetteScaleGeometricMean, this.vignetteStretch);
-                    vignetteScaleY = Tools.Mix(vignetteScaleY, vignetteScaleGeometricMean, this.vignetteStretch);
+            // Setup the default processing configuration to the scene.
+            this._attachImageProcessingConfiguration(null);
 
-                    effect.setFloat4('vignetteSettings1', vignetteScaleX, vignetteScaleY, -vignetteScaleX * this.vignetteCentreX, -vignetteScaleY * this.vignetteCentreY);
+            this.imageProcessingConfiguration.applyByPostProcess = true;
 
-                    let vignettePower = -2.0 * this.vignetteWeight;
-                    effect.setFloat4('vignetteSettings2', this.vignetteColor.r, this.vignetteColor.g, this.vignetteColor.b, vignettePower);
-                }
+            this._updateParameters();
 
-                // Contrast and exposure
-                effect.setFloat('contrast', this.cameraContrast);
-                effect.setFloat('cameraExposureLinear', Math.pow(2.0, -this.cameraExposure) * Math.PI);
-                
-                // Color transform settings
-                if (this._colorGradingTexture) {
-                    effect.setTexture('txColorTransform', this.colorGradingTexture);
-                    let textureSize = this.colorGradingTexture.getSize().height;
-
-                    effect.setFloat4("colorTransformSettings",
-                        (textureSize - 1) / textureSize, // textureScale
-                        0.5 / textureSize, // textureOffset
-                        textureSize, // textureSize
-                        this.colorGradingWeight // weight
-                    );                
-                }
+            this.onApply = (effect: Effect) => {
+                this.imageProcessingConfiguration.bind(effect, this.aspectRatio);
             };
         }
 
         protected _updateParameters(): void {
+            this._defines.FROMLINEARSPACE = this._fromLinearSpace;
+            this.imageProcessingConfiguration.prepareDefines(this._defines);
             var defines = "";
-            var samplers = ["textureSampler"];
-
-            if (this.colorGradingTexture) {
-                defines = "#define COLORGRADING\r\n";
-                samplers.push("txColorTransform");
-            }
-
-            if (this._vignetteEnabled) {
-                defines += "#define VIGNETTE\r\n";
-
-                if (this.vignetteBlendMode === ImageProcessingPostProcess._VIGNETTEMODE_MULTIPLY) {
-                    defines += "#define VIGNETTEBLENDMODEMULTIPLY\r\n";
-                } else {
-                    defines += "#define VIGNETTEBLENDMODEOPAQUE\r\n";
+            for (const define in this._defines) {
+                if (this._defines[define]) {
+                    defines += `#define ${define}\r\n`;
                 }
             }
 
-            if (this.cameraToneMappingEnabled) {
-                defines += "#define TONEMAPPING\r\n";
-            }
-
-            if (this._colorCurvesEnabled && this.colorCurves) {
-                defines += "#define COLORCURVES\r\n";
-            }
+            var samplers = ["textureSampler"];
+            ImageProcessingConfiguration.PrepareSamplers(samplers, this._defines);
 
-            if (this._fromLinearSpace) {
-                defines += "#define FROMLINEARSPACE\r\n";
-            }
+            var uniforms = ["scale"];
+            ImageProcessingConfiguration.PrepareUniforms(uniforms, this._defines);
 
-            this.updateEffect(defines, null, samplers);
+            this.updateEffect(defines, uniforms, samplers);
         }
 
-        // Statics
-        private static _VIGNETTEMODE_MULTIPLY = 0;
-        private static _VIGNETTEMODE_OPAQUE = 1;
+        public dispose(camera?: Camera): void {
+            super.dispose(camera);
 
-        public static get VIGNETTEMODE_MULTIPLY(): number {
-            return ImageProcessingPostProcess._VIGNETTEMODE_MULTIPLY;
-        }
-
-        public static get VIGNETTEMODE_OPAQUE(): number {
-            return ImageProcessingPostProcess._VIGNETTEMODE_OPAQUE;
+            if (this._imageProcessingConfiguration && this._imageProcessingObserver) {
+                this._imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingObserver);
+            }
+            
+            this.imageProcessingConfiguration.applyByPostProcess = false;
         }
     }
 }
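
As a usage note (not part of the diff): the post process now shares the attached configuration (the scene one by default) and switches applyByPostProcess on, so materials skip their own image processing and output linear colour; disposing it restores in-material processing. A sketch, assuming an existing camera:

    // Sketch: move image processing to a single full screen pass.
    const ipp = new BABYLON.ImageProcessingPostProcess("imageProcessing", 1.0, camera);
    ipp.contrast = 1.3;            // proxies the shared ImageProcessingConfiguration
    ipp.vignetteEnabled = true;
    // Later: remove the pass and let materials apply image processing again.
    ipp.dispose(camera);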

+ 31 - 1
src/Shaders/ShadersInclude/helperFunctions.fx

@@ -1,4 +1,10 @@
-mat3 transposeMat3(mat3 inMatrix) {
+const float PI = 3.1415926535897932384626433832795;
+
+const float LinearEncodePowerApprox = 2.2;
+const float GammaEncodePowerApprox = 1.0 / LinearEncodePowerApprox;
+const vec3 LuminanceEncodeApprox = vec3(0.2126, 0.7152, 0.0722);
+
+mat3 transposeMat3(mat3 inMatrix) {
 	vec3 i0 = inMatrix[0];
 	vec3 i1 = inMatrix[1];
 	vec3 i2 = inMatrix[2];
@@ -10,4 +16,28 @@
 		);
 
 	return outMatrix;
+}
+
+vec3 applyEaseInOut(vec3 x){
+	return x * x * (3.0 - 2.0 * x);
+}
+
+vec3 toLinearSpace(vec3 color)
+{
+	return pow(color, vec3(LinearEncodePowerApprox));
+}
+
+vec3 toGammaSpace(vec3 color)
+{
+    return pow(color, vec3(GammaEncodePowerApprox));
+}
+
+float square(float value)
+{
+    return value * value;
+}
+
+float getLuminance(vec3 color)
+{
+    return clamp(dot(color, LuminanceEncodeApprox), 0., 1.);
 }

+ 24 - 0
src/Shaders/ShadersInclude/imageProcessingDeclaration.fx

@@ -0,0 +1,24 @@
+#ifdef EXPOSURE
+	uniform float exposureLinear;
+#endif
+
+#ifdef CONTRAST
+	uniform float contrast;
+#endif
+
+#ifdef VIGNETTE
+	uniform vec2 vInverseScreenSize;
+	uniform vec4 vignetteSettings1;
+	uniform vec4 vignetteSettings2;
+#endif
+
+#ifdef COLORCURVES
+	uniform vec4 vCameraColorCurveNegative;
+	uniform vec4 vCameraColorCurveNeutral;
+	uniform vec4 vCameraColorCurvePositive;
+#endif
+
+#ifdef COLORGRADING
+	uniform sampler2D txColorTransform;
+	uniform vec4 colorTransformSettings;
+#endif

+ 120 - 0
src/Shaders/ShadersInclude/imageProcessingFunctions.fx

@@ -0,0 +1,120 @@
+#ifdef COLORGRADING
+/** 
+ * Polyfill for SAMPLE_TEXTURE_3D, which is unsupported in WebGL.
+ * sampler3dSetting.x = textureOffset (0.5 / textureSize).
+ * sampler3dSetting.y = textureSize.
+ */
+vec3 sampleTexture3D(sampler2D colorTransform, vec3 color, vec2 sampler3dSetting)
+{
+	float sliceSize = 2.0 * sampler3dSetting.x; // Size of 1 slice relative to the texture, for example 1/8
+
+#ifdef SAMPLER3DGREENDEPTH
+	float sliceContinuous = (color.g - sampler3dSetting.x) * sampler3dSetting.y;
+#else
+	float sliceContinuous = (color.b - sampler3dSetting.x) * sampler3dSetting.y;
+#endif
+	float sliceInteger = floor(sliceContinuous);
+
+	// Note: this is mathematically equivalent to fract(sliceContinuous); but we use explicit subtract
+	// rather than separate fract() for correct results near slice boundaries (matching sliceInteger choice)
+	float sliceFraction = sliceContinuous - sliceInteger;
+
+#ifdef SAMPLER3DGREENDEPTH
+	vec2 sliceUV = color.rb;
+#else
+	vec2 sliceUV = color.rg;
+#endif
+	
+	sliceUV.x *= sliceSize;
+	sliceUV.x += sliceInteger * sliceSize;
+
+	sliceUV = clamp(sliceUV, 0., 1.);
+
+	vec4 slice0Color = texture2D(colorTransform, sliceUV);
+
+	sliceUV.x += sliceSize;
+	
+	sliceUV = clamp(sliceUV, 0., 1.);
+	vec4 slice1Color = texture2D(colorTransform, sliceUV);
+
+	vec3 result = mix(slice0Color.rgb, slice1Color.rgb, sliceFraction);
+
+#ifdef SAMPLER3DBGRMAP
+	color.rgb = result.rgb;
+#else
+	color.rgb = result.bgr;
+#endif
+
+	return color;
+}
+#endif
+
+vec4 applyImageProcessing(vec4 result) {
+
+#ifdef EXPOSURE
+	result.rgb *= exposureLinear;
+#endif
+
+#ifdef VIGNETTE
+		//vignette
+		vec2 viewportXY = gl_FragCoord.xy * vInverseScreenSize;
+		viewportXY = viewportXY * 2.0 - 1.0;
+		vec3 vignetteXY1 = vec3(viewportXY * vignetteSettings1.xy + vignetteSettings1.zw, 1.0);
+		float vignetteTerm = dot(vignetteXY1, vignetteXY1);
+		float vignette = pow(vignetteTerm, vignetteSettings2.w);
+
+		// Interpolate between the artist 'color' and white based on the physical transmission value 'vignette'.
+		vec3 vignetteColor = vignetteSettings2.rgb;
+
+	#ifdef VIGNETTEBLENDMODEMULTIPLY
+		vec3 vignetteColorMultiplier = mix(vignetteColor, vec3(1, 1, 1), vignette);
+		result.rgb *= vignetteColorMultiplier;
+	#endif
+
+	#ifdef VIGNETTEBLENDMODEOPAQUE
+		result.rgb = mix(vignetteColor, result.rgb, vignette);
+	#endif
+#endif
+	
+#ifdef TONEMAPPING
+	const float tonemappingCalibration = 1.590579;
+	result.rgb = 1.0 - exp2(-tonemappingCalibration * result.rgb);
+#endif
+
+	// Going back to gamma space
+	result.rgb = toGammaSpace(result.rgb);
+	result.rgb = clamp(result.rgb, 0.0, 1.0);
+
+#ifdef CONTRAST
+	// Contrast
+	vec3 resultHighContrast = applyEaseInOut(result.rgb);
+
+	if (contrast < 1.0) {
+		// Decrease contrast: interpolate towards zero-contrast image (flat grey)
+		result.rgb = mix(vec3(0.5, 0.5, 0.5), result.rgb, contrast);
+	} else {
+		// Increase contrast: apply simple shoulder-toe high contrast curve
+		result.rgb = mix(result.rgb, resultHighContrast, contrast - 1.0);
+	}
+#endif
+
+	// Apply Color Transform
+#ifdef COLORGRADING
+	vec3 colorTransformInput = result.rgb * colorTransformSettings.xxx + colorTransformSettings.yyy;
+	vec3 colorTransformOutput = sampleTexture3D(txColorTransform, colorTransformInput, colorTransformSettings.yz).rgb;
+
+	result.rgb = mix(result.rgb, colorTransformOutput, colorTransformSettings.www);
+#endif
+
+#ifdef COLORCURVES
+	// Apply Color Curves
+	float luma = getLuminance(result.rgb);
+	vec2 curveMix = clamp(vec2(luma * 3.0 - 1.5, luma * -3.0 + 1.5), vec2(0.0), vec2(1.0));
+	vec4 colorCurve = vCameraColorCurveNeutral + curveMix.x * vCameraColorCurvePositive - curveMix.y * vCameraColorCurveNegative;
+
+	result.rgb *= colorCurve.rgb;
+	result.rgb = mix(vec3(luma), result.rgb, colorCurve.a);
+#endif
+
+	return result;
+}
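
For reference, the CPU side (ImageProcessingConfiguration.bind above) packs colorTransformSettings as ((N - 1) / N, 0.5 / N, N, weight) for an N-texel-high LUT, so the remap colorTransformInput = color * (N - 1) / N + 0.5 / N places 0 and 1 at the centres of the first and last texels before sampling, and the w component blends between the graded and ungraded colour. Likewise, vignetteSettings1 packs (sx, sy, -sx * cx, -sy * cy) with sy = tan(vignetteCameraFov / 2) and sx = sy * aspect, and vignetteSettings2.w = -2 * vignetteWeight, giving a falloff of (1 + |p|^2)^(-2 * vignetteWeight) at the scaled screen position p.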

+ 0 - 9
src/Shaders/ShadersInclude/pbrFragmentDeclaration.fx

@@ -44,15 +44,6 @@ uniform vec3 vReflectivityInfos;
 uniform vec2 vMicroSurfaceSamplerInfos;
 #endif
 
-#ifdef OPACITYFRESNEL
-uniform vec4 opacityParts;
-#endif
-
-#ifdef EMISSIVEFRESNEL
-uniform vec4 emissiveLeftColor;
-uniform vec4 emissiveRightColor;
-#endif
-
 // Refraction Reflection
 #if defined(REFLECTIONMAP_SPHERICAL) || defined(REFLECTIONMAP_PROJECTION) || defined(REFRACTION)
 uniform mat4 view;

+ 11 - 64
src/Shaders/ShadersInclude/pbrFunctions.fx

@@ -3,25 +3,14 @@
 #define FRESNEL_MAXIMUM_ON_ROUGH 0.25
 
 // PBR CUSTOM CONSTANTS
-const float kPi = 3.1415926535897932384626433832795;
 const float kRougnhessToAlphaScale = 0.1;
 const float kRougnhessToAlphaOffset = 0.29248125;
 
-float Square(float value)
-{
-    return value * value;
-}
-
-float getLuminance(vec3 color)
-{
-    return clamp(dot(color, vec3(0.2126, 0.7152, 0.0722)), 0., 1.);
-}
-
 float convertRoughnessToAverageSlope(float roughness)
 {
     // Calculate AlphaG as square of roughness; add epsilon to avoid numerical issues
     const float kMinimumVariance = 0.0005;
-    float alphaG = Square(roughness) + kMinimumVariance;
+    float alphaG = square(roughness) + kMinimumVariance;
     return alphaG;
 }
 
@@ -68,9 +57,9 @@ float normalDistributionFunction_TrowbridgeReitzGGX(float NdotH, float alphaG)
     // Note: alphaG is average slope (gradient) of the normals in slope-space.
     // It is also the (trigonometric) tangent of the median distribution value, i.e. 50% of normals have
     // a tangent (gradient) closer to the macrosurface than this slope.
-    float a2 = Square(alphaG);
+    float a2 = square(alphaG);
     float d = NdotH * NdotH * (a2 - 1.0) + 1.0;
-    return a2 / (kPi * d * d);
+    return a2 / (PI * d * d);
 }
 
 vec3 fresnelSchlickGGX(float VdotH, vec3 reflectance0, vec3 reflectance90)
@@ -78,7 +67,7 @@ vec3 fresnelSchlickGGX(float VdotH, vec3 reflectance0, vec3 reflectance90)
     return reflectance0 + (reflectance90 - reflectance0) * pow(clamp(1.0 - VdotH, 0., 1.), 5.0);
 }
 
-vec3 FresnelSchlickEnvironmentGGX(float VdotN, vec3 reflectance0, vec3 reflectance90, float smoothness)
+vec3 fresnelSchlickEnvironmentGGX(float VdotN, vec3 reflectance0, vec3 reflectance90, float smoothness)
 {
     // Schlick fresnel approximation, extended with basic smoothness term so that rough surfaces do not approach reflectance90 at grazing angle
     float weight = mix(FRESNEL_MAXIMUM_ON_ROUGH, 1.0, smoothness);
@@ -109,7 +98,7 @@ float computeDiffuseTerm(float NdotL, float NdotV, float VdotH, float roughness)
         (1.0 + (diffuseFresnel90 - 1.0) * diffuseFresnelNL) *
         (1.0 + (diffuseFresnel90 - 1.0) * diffuseFresnelNV);
 
-    return fresnel * NdotL / kPi;
+    return fresnel * NdotL / PI;
 }
 
 float adjustRoughnessFromLightProperties(float roughness, float lightRadius, float lightDistance)
@@ -136,51 +125,9 @@ float computeDefaultMicroSurface(float microSurface, vec3 reflectivityColor)
     return microSurface;
 }
 
-vec3 toLinearSpace(vec3 color)
-{
-    return vec3(pow(color.r, 2.2), pow(color.g, 2.2), pow(color.b, 2.2));
-}
-
-vec3 toGammaSpace(vec3 color)
-{
-    return vec3(pow(color.r, 1.0 / 2.2), pow(color.g, 1.0 / 2.2), pow(color.b, 1.0 / 2.2));
-}
-
-#ifdef CAMERATONEMAP
-    vec3 toneMaps(vec3 color)
-    {
-        color = max(color, 0.0);
-
-        // TONE MAPPING / EXPOSURE
-        color.rgb = color.rgb * vCameraInfos.x;
-
-        float tuning = 1.5; // TODO: sync up so e.g. 18% greys are matched to exposure appropriately
-        // PI Test
-        // tuning *=  kPi;
-        vec3 tonemapped = 1.0 - exp2(-color.rgb * tuning); // simple local photographic tonemapper
-        color.rgb = mix(color.rgb, tonemapped, 1.0);
-        return color;
-    }
-#endif
-
-#ifdef CAMERACONTRAST
-    vec4 contrasts(vec4 color)
-    {
-        color = clamp(color, 0.0, 1.0);
-
-        vec3 resultHighContrast = color.rgb * color.rgb * (3.0 - 2.0 * color.rgb);
-        float contrast = vCameraInfos.y;
-        if (contrast < 1.0)
-        {
-            // Decrease contrast: interpolate towards zero-contrast image (flat grey)
-            color.rgb = mix(vec3(0.5, 0.5, 0.5), color.rgb, contrast);
-        }
-        else
-        {
-            // Increase contrast: apply simple shoulder-toe high contrast curve
-            color.rgb = mix(color.rgb, resultHighContrast, contrast - 1.0);
-        }
-
-        return color;
-    }
-#endif
+// For the typical incident reflectance range (between 4% and 100%), set the grazing reflectance to 100% for a typical Fresnel effect.
+// For very low reflectance on highly diffuse objects (below 4%), incrementally reduce the grazing reflectance towards 0%.
+float fresnelGrazingReflectance(float reflectance0) {
+	float reflectance90 = clamp(reflectance0 * 25.0, 0.0, 1.0);
+	return reflectance90;
+}
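
In scalar form, the Schlick terms used above reduce to a few lines; a small TypeScript sketch for reference (illustrative, not library code):

    const clamp01 = (v: number) => Math.min(Math.max(v, 0.0), 1.0);

    // Grazing reflectance: 100% for typical dielectrics, scaled down below ~4% reflectance.
    const fresnelGrazingReflectance = (reflectance0: number) => clamp01(reflectance0 * 25.0);

    // Schlick Fresnel between reflectance0 (normal incidence) and reflectance90 (grazing).
    const fresnelSchlick = (cosTheta: number, reflectance0: number, reflectance90: number) =>
        reflectance0 + (reflectance90 - reflectance0) * Math.pow(clamp01(1.0 - cosTheta), 5.0);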

+ 0 - 3
src/Shaders/ShadersInclude/pbrUboDeclaration.fx

@@ -30,9 +30,6 @@ uniform Material
 	uniform vec2 vMicrosurfaceTextureLods;
 	uniform vec4 vReflectivityColor;
 	uniform vec3 vEmissiveColor;
-	uniform vec4 opacityParts;
-	uniform vec4 emissiveLeftColor;
-	uniform vec4 emissiveRightColor;
 
 	uniform float pointSize;
 };

+ 11 - 13
src/Shaders/default.fragment.fx

@@ -99,15 +99,9 @@ varying vec3 vDirectionW;
 
 #endif
 
-#ifdef CAMERACOLORGRADING
-	#include<colorGradingDefinition>	
-	#include<colorGrading>
-#endif
+#include<imageProcessingDeclaration>
 
-#ifdef CAMERACOLORCURVES
-	#include<colorCurvesDefinition>
-	#include<colorCurves>
-#endif
+#include<imageProcessingFunctions>
 
 #include<bumpFragmentFunctions>
 #include<clipPlaneFragmentDeclaration>
@@ -361,12 +355,16 @@ void main(void) {
 #include<logDepthFragment>
 #include<fogFragment>
 
-#ifdef CAMERACOLORGRADING
-	color = colorGrades(color);
+// Apply image processing if relevant. As this applies in linear space,
+// we first move from gamma to linear.
+#ifdef IMAGEPROCESSINGPOSTPROCESS
+	color.rgb = toLinearSpace(color.rgb);
+#else
+	#ifdef IMAGEPROCESSING
+		color.rgb = toLinearSpace(color.rgb);
+		color = applyImageProcessing(color);
+	#endif
 #endif
 
-#ifdef CAMERACOLORCURVES
-	color.rgb = applyColorCurves(color.rgb);
-#endif
 	gl_FragColor = color;
 }
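
The toLinearSpace/toGammaSpace helpers used here are the usual 2.2 power approximations; per channel, the equivalent TypeScript would be (approximate sRGB, shown only to make the round trip explicit):

    // Approximate gamma <-> linear conversions (2.2 power curve, not exact sRGB).
    const toLinearSpace = (c: number) => Math.pow(c, 2.2);
    const toGammaSpace = (c: number) => Math.pow(c, 1.0 / 2.2);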

+ 14 - 115
src/Shaders/imageProcessing.fragment.fx

@@ -2,130 +2,29 @@
 varying vec2 vUV;
 uniform sampler2D textureSampler;
 
-const float GammaEncodePowerApprox = 1.0 / 2.2;
-const float LinearEncodePowerApprox = 2.2;
-const vec3 RGBLuminanceCoefficients = vec3(0.2126, 0.7152, 0.0722);
+#include<imageProcessingDeclaration>
 
-uniform float contrast;
-uniform vec4 vignetteSettings1;
-uniform vec4 vignetteSettings2;
-uniform float cameraExposureLinear;
-uniform vec4 vCameraColorCurveNegative;
-uniform vec4 vCameraColorCurveNeutral;
-uniform vec4 vCameraColorCurvePositive;
-uniform sampler2D txColorTransform;
-uniform vec4 colorTransformSettings;
+#include<helperFunctions>
 
-vec3 applyEaseInOut(vec3 x){
-	return x * x * (3.0 - 2.0 * x);
-}
+#include<imageProcessingFunctions>
 
-/** 
- * Polyfill for SAMPLE_TEXTURE_3D, which is unsupported in WebGL.
- * colorTransformSettings.y = textureOffset (0.5 / textureSize).
- * colorTransformSettings.z = textureSize.
- */
-vec3 sampleTexture3D(sampler2D colorTransform, vec3 color)
+void main(void)
 {
-	float sliceSize = 2.0 * colorTransformSettings.y; // Size of 1 slice relative to the texture, for example 1/8
-
-	float sliceContinuous = (color.y - colorTransformSettings.y) * colorTransformSettings.z;
-	float sliceInteger = floor(sliceContinuous);
-
-	// Note: this is mathematically equivalent to fract(sliceContinuous); but we use explicit subtract
-	// rather than separate fract() for correct results near slice boundaries (matching sliceInteger choice)
-	float sliceFraction = sliceContinuous - sliceInteger;
-
-	vec2 sliceUV = color.xz;
-	
-	sliceUV.x *= sliceSize;
-	sliceUV.x += sliceInteger * sliceSize;
-
-	vec4 slice0Color = texture2D(colorTransform, sliceUV);
-
-	sliceUV.x += sliceSize;
-	vec4 slice1Color = texture2D(colorTransform, sliceUV);
-
-	vec3 result = mix(slice0Color.rgb, slice1Color.rgb, sliceFraction);
-	color.rgb = result.bgr;
-
-	return color;
-}
-
-vec4 applyImageProcessing(vec4 result, vec2 viewportXY){
-
-#ifndef FROMLINEARSPACE
-	// Need to move to linear space for subsequent operations
-	result.rgb = pow(result.rgb, vec3(LinearEncodePowerApprox));
-#endif
-
-	result.rgb *= cameraExposureLinear;
-
-#ifdef VIGNETTE
-		//vignette
-		vec3 vignetteXY1 = vec3(viewportXY * vignetteSettings1.xy + vignetteSettings1.zw, 1.0);
-		float vignetteTerm = dot(vignetteXY1, vignetteXY1);
-		float vignette = pow(vignetteTerm, vignetteSettings2.w);
-
-		// Interpolate between the artist 'color' and white based on the physical transmission value 'vignette'.
-		vec3 vignetteColor = vignetteSettings2.rgb;
+	vec4 result = texture2D(textureSampler, vUV);
 
-	#ifdef VIGNETTEBLENDMODEMULTIPLY
-		vec3 vignetteColorMultiplier = mix(vignetteColor, vec3(1, 1, 1), vignette);
-		result.rgb *= vignetteColorMultiplier;
+#ifdef IMAGEPROCESSING
+	#ifndef FROMLINEARSPACE
+		// Need to move to linear space for subsequent operations.
+		result.rgb = toLinearSpace(result.rgb);
 	#endif
 
-	#ifdef VIGNETTEBLENDMODEOPAQUE
-		result.rgb = mix(vignetteColor, result.rgb, vignette);
+	result = applyImageProcessing(result);
+#else
+	// In the case where the input is in linear space, we at least need to put it back in gamma space.
+	#ifdef FROMLINEARSPACE
+		result = applyImageProcessing(result);
 	#endif
-
 #endif
-	
-#ifdef TONEMAPPING	
-	float tonemappingCalibration = 1.590579;
-	result.rgb = 1.0 - exp2(-tonemappingCalibration * result.rgb);
-#endif
-
-	// Going back to gamma space
-	result.rgb = pow(result.rgb, vec3(GammaEncodePowerApprox));
-	result.rgb = clamp(result.rgb, 0.0, 1.0);
-
-	// Contrast
-	vec3 resultHighContrast = applyEaseInOut(result.rgb);
-
-	if (contrast < 1.0) {
-		result.rgb = mix(vec3(0.5, 0.5, 0.5), result.rgb, contrast);
-	} else {
-		result.rgb = mix(result.rgb, resultHighContrast, contrast - 1.0);
-	}
-
-	// Apply Color Transform
-#ifdef COLORGRADING
-	vec3 colorTransformInput = result.rgb * colorTransformSettings.xxx + colorTransformSettings.yyy;
-	vec3 colorTransformOutput = sampleTexture3D(txColorTransform, colorTransformInput).rgb;
-
-	result.rgb = mix(result.rgb, colorTransformOutput, colorTransformSettings.www);
-#endif
-
-#ifdef COLORCURVES
-	// Apply Color Curves
-	float luma = dot(result.rgb, RGBLuminanceCoefficients);
-	vec2 curveMix = clamp(vec2(luma * 3.0 - 1.5, luma * -3.0 + 1.5), vec2(0.0), vec2(1.0));
-	vec4 colorCurve = vCameraColorCurveNeutral + curveMix.x * vCameraColorCurvePositive - curveMix.y * vCameraColorCurveNegative;
-
-	result.rgb *= colorCurve.rgb;
-	result.rgb = mix(vec3(luma), result.rgb, colorCurve.a);
-#endif
-
-	return result;
-}
-
-void main(void) 
-{
-	vec4 result = texture2D(textureSampler, vUV);
-
-	vec2 viewportXY = vUV * 2.0 - 1.0;
-	result = applyImageProcessing(result, viewportXY);
 
 	gl_FragColor = result;
 }
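
The sampleTexture3D polyfill removed here (its updated form now lives in imageProcessingFunctions.fx) emulates a 3D LUT with a 2D texture whose slices are packed side by side. A minimal TypeScript sketch of the same slice interpolation, assuming a hypothetical flat RGB array holding a (size*size) x size LUT; it uses nearest sampling within a slice and a linear blend between slices:

    type RGB = [number, number, number];

    // Hypothetical CPU-side equivalent of the slice-based LUT lookup (illustrative only).
    function sampleSlicedLut(lut: Float32Array, size: number, r: number, g: number, b: number): RGB {
        const width = size * size; // packed texture width in texels
        const texel = (x: number, y: number): RGB => {
            const i = (Math.min(y, size - 1) * width + Math.min(x, width - 1)) * 3;
            return [lut[i], lut[i + 1], lut[i + 2]];
        };
        // Green selects the slice; red and blue address texels within the slice.
        const sliceContinuous = Math.min(g, 0.999999) * size;
        const slice0 = Math.floor(sliceContinuous);
        const sliceFraction = sliceContinuous - slice0; // explicit fract(), as in the shader
        const x = Math.round(r * (size - 1)) + slice0 * size;
        const y = Math.round(b * (size - 1));
        const c0 = texel(x, y);
        const c1 = texel(x + size, y); // same texel in the next slice
        return [
            c0[0] + (c1[0] - c0[0]) * sliceFraction,
            c0[1] + (c1[1] - c0[1]) * sliceFraction,
            c0[2] + (c1[2] - c0[2]) * sliceFraction,
        ];
    }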

+ 175 - 168
src/Shaders/pbr.fragment.fx

@@ -68,13 +68,9 @@ varying vec2 vMicroSurfaceSamplerUV;
 uniform sampler2D microSurfaceSampler;
 #endif
 
-// Fresnel
-#include<fresnelFunction>
-
 // Refraction
 #ifdef REFRACTION
 
-
 #ifdef REFRACTIONMAP_3D
 uniform samplerCube refractionCubeSampler;
 #else
@@ -84,50 +80,41 @@ uniform sampler2D refraction2DSampler;
 
 // Reflection
 #ifdef REFLECTION
+	#ifdef REFLECTIONMAP_3D
+		uniform samplerCube reflectionCubeSampler;
+	#else
+		uniform sampler2D reflection2DSampler;
+	#endif
 
-#ifdef REFLECTIONMAP_3D
-uniform samplerCube reflectionCubeSampler;
-#else
-uniform sampler2D reflection2DSampler;
-#endif
+	#ifdef REFLECTIONMAP_SKYBOX
+		varying vec3 vPositionUVW;
+	#else
+		#if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED)
+			varying vec3 vDirectionW;
+		#endif
 
-#ifdef REFLECTIONMAP_SKYBOX
-varying vec3 vPositionUVW;
-#else
-#if defined(REFLECTIONMAP_EQUIRECTANGULAR_FIXED) || defined(REFLECTIONMAP_MIRROREDEQUIRECTANGULAR_FIXED)
-varying vec3 vDirectionW;
-#endif
+	#endif
 
+	#include<reflectionFunction>
 #endif
 
-#include<reflectionFunction>
-
+// Forces linear space for image processing
+#ifndef FROMLINEARSPACE
+	#define FROMLINEARSPACE;
 #endif
 
-#ifdef CAMERACOLORGRADING
-	#include<colorGradingDefinition>
-#endif
+#include<imageProcessingDeclaration>
 
-#ifdef CAMERACOLORCURVES
-	#include<colorCurvesDefinition>
-#endif
+#include<helperFunctions>
+
+#include<imageProcessingFunctions>
 
 // PBR
 #include<shadowsFragmentFunctions>
 #include<pbrFunctions>
-
-#ifdef CAMERACOLORGRADING
-	#include<colorGrading>
-#endif
-
-#ifdef CAMERACOLORCURVES
-	#include<colorCurves>
-#endif
-
 #include<harmonicsFunctions>
 #include<pbrLightFunctions>
 
-#include<helperFunctions>
 #include<bumpFragmentFunctions>
 #include<clipPlaneFragmentDeclaration>
 #include<logDepthDeclaration>
@@ -138,9 +125,10 @@ varying vec3 vDirectionW;
 void main(void) {
 #include<clipPlaneFragment>
 
+// _______________________________________________________________________________
+// _____________________________ Geometry Information ____________________________
 	vec3 viewDirectionW = normalize(vEyePosition - vPositionW);
 
-	// Bump
 #ifdef NORMAL
 	vec3 normalW = normalize(vNormalW);
 #else
@@ -153,6 +141,7 @@ void main(void) {
 	normalW = gl_FrontFacing ? normalW : -normalW;
 #endif
 
+// _____________________________ Albedo Information ______________________________
 	// Albedo
 	vec3 surfaceAlbedo = vAlbedoColor.rgb;
 
@@ -169,15 +158,32 @@ void main(void) {
 	surfaceAlbedo *= vAlbedoInfos.y;
 #endif
 
-#ifndef LINKREFRACTIONTOTRANSPARENCY
-	#if defined(ALPHATEST) && defined(ALPHATESTVALUE)
+// _____________________________ Alpha Information _______________________________
+#ifdef OPACITY
+	vec4 opacityMap = texture2D(opacitySampler, vOpacityUV + uvOffset);
+
+	#ifdef OPACITYRGB
+		alpha = getLuminance(opacityMap.rgb);
+	#else
+		alpha *= opacityMap.a;
+	#endif
+
+	alpha *= vOpacityInfos.y;
+#endif
+
+#ifdef VERTEXALPHA
+	alpha *= vColor.a;
+#endif
+
+#if !defined(LINKREFRACTIONTOTRANSPARENCY) && !defined(ALPHAFRESNEL)
+	#ifdef ALPHATEST
 		if (alpha <= ALPHATESTVALUE)
 			discard;
-			
-			#ifndef ALPHABLEND
-				// Prevent to blend with the canvas.
-				alpha = 1.0;
-			#endif
+
+		#ifndef ALPHABLEND
+			// Prevent to blend with the canvas.
+			alpha = 1.0;
+		#endif
 	#endif
 #endif
 
@@ -185,7 +191,7 @@ void main(void) {
 	surfaceAlbedo *= vColor.rgb;
 #endif
 
-	// Ambient color
+// _____________________________ AO    Information _______________________________
 	vec3 ambientOcclusionColor = vec3(1., 1., 1.);
 
 #ifdef AMBIENT
@@ -196,7 +202,7 @@ void main(void) {
 	ambientOcclusionColor = mix(ambientOcclusionColor, ambientOcclusionColorMap, vAmbientInfos.z);
 #endif
 
-	// Reflectivity map
+// _____________________________ Reflectivity Info _______________________________
 	float microSurface = vReflectivityColor.a;
 	vec3 surfaceReflectivityColor = vReflectivityColor.rgb;
 
@@ -268,78 +274,44 @@ void main(void) {
 	#endif
 #endif
 
-	// Compute N dot V.
-	float NdotV = clamp(dot(normalW, viewDirectionW),0., 1.) + 0.00001;
-
 	// Adapt microSurface.
 	microSurface = clamp(microSurface, 0., 1.);
-
 	// Compute roughness.
 	float roughness = 1. - microSurface;
 
-	#ifdef LIGHTMAP
-  		vec3 lightmapColor = texture2D(lightmapSampler, vLightmapUV + uvOffset).rgb * vLightmapInfos.y;
-  	#endif
-
-	float NdotL = -1.;
+// _____________________________ Alpha Fresnel ___________________________________
+#ifdef ALPHAFRESNEL
+	// Convert approximate perceptual opacity (gamma-encoded opacity) to linear opacity (absorptance, or inverse transmission)
+	// for use with the linear HDR render target. The final composition will be converted back to gamma encoded values for eventual display.
+	// Uses a power of 2.0 rather than 2.2 for simplicity/efficiency, and because the mapping does not need to match the gamma applied to RGB.
+	float opacityPerceptual = alpha;
+	float opacity0 = opacityPerceptual * opacityPerceptual;
+	float opacity90 = fresnelGrazingReflectance(opacity0);
 
-	// Compute reflectance.
-	float reflectance = max(max(surfaceReflectivityColor.r, surfaceReflectivityColor.g), surfaceReflectivityColor.b);
+	vec3 normalForward = faceforward(normalW, -viewDirectionW, normalW);
 
-	// For typical incident reflectance range (between 4% to 100%) set the grazing reflectance to 100% for typical fresnel effect.
-    // For very low reflectance range on highly diffuse objects (below 4%), incrementally reduce grazing reflecance to 0%.
-    float reflectance90 = clamp(reflectance * 25.0, 0.0, 1.0);
-	vec3 specularEnvironmentR0 = surfaceReflectivityColor.rgb;
-	vec3 specularEnvironmentR90 = vec3(1.0, 1.0, 1.0) * reflectance90;
-
-	// Lighting
-	vec3 diffuseBase = vec3(0., 0., 0.);
-
-#ifdef SPECULARTERM
-	vec3 specularBase = vec3(0., 0., 0.);
-#endif
+	// Calculate the appropriate linear opacity for the current viewing angle (formally, this quantity is the "directional absorptance").
+	alpha = fresnelSchlickEnvironmentGGX(clamp(dot(viewDirectionW, normalForward), 0.0, 1.0), vec3(opacity0), vec3(opacity90), sqrt(microSurface)).x;
 	
-	lightingInfo info;
-	float shadow = 1.; // 1 - shadowLevel
-
-#include<lightFragment>[0..maxSimultaneousLights]
-
-	vec3 lightDiffuseContribution = diffuseBase;
-
-#ifdef SPECULARTERM
-	vec3 lightSpecularContribution = specularBase * vLightingIntensity.w;
-#endif
-
-#ifdef OPACITY
-	vec4 opacityMap = texture2D(opacitySampler, vOpacityUV + uvOffset);
+	#ifdef ALPHATEST
+		if (alpha <= ALPHATESTVALUE)
+			discard;
 
-	#ifdef OPACITYRGB
-		opacityMap.rgb = opacityMap.rgb * vec3(0.3, 0.59, 0.11);
-		alpha *= (opacityMap.x + opacityMap.y + opacityMap.z)* vOpacityInfos.y;
-	#else
-		alpha *= opacityMap.a * vOpacityInfos.y;
+		#ifndef ALPHABLEND
+			// Prevent blending with the canvas.
+			alpha = 1.0;
+		#endif
 	#endif
 #endif
 
-#ifdef VERTEXALPHA
-	alpha *= vColor.a;
-#endif
-
-#ifdef OPACITYFRESNEL
-	float opacityFresnelTerm = computeFresnelTerm(viewDirectionW, normalW, opacityParts.z, opacityParts.w);
-
-	alpha += opacityParts.x * (1.0 - opacityFresnelTerm) + opacityFresnelTerm * opacityParts.y;
-#endif
-
-	// Refraction
-	vec3 surfaceRefractionColor = vec3(0., 0., 0.);
-
-	// Go mat -> blurry reflexion according to microSurface
+// _____________________________ Compute LODs Fetch ____________________________________
 #ifdef LODBASEDMICROSFURACE
 	float alphaG = convertRoughnessToAverageSlope(roughness);
 #endif
 
+// _____________________________ Refraction Info _______________________________________
 #ifdef REFRACTION
+	vec3 surfaceRefractionColor = vec3(0., 0., 0.);
 	vec3 refractionVector = refract(-viewDirectionW, normalW, vRefractionInfos.y);
 
 	#ifdef LODBASEDMICROSFURACE
@@ -397,11 +369,10 @@ void main(void) {
 	#endif
 #endif
 
-	// Reflection
+// _____________________________ Reflection Info _______________________________________
+#ifdef REFLECTION
 	vec3 environmentRadiance = vReflectionColor.rgb;
 	vec3 environmentIrradiance = vReflectionColor.rgb;
-
-#ifdef REFLECTION
 	vec3 vReflectionUVW = computeReflectionCoords(vec4(vPositionW, 1.0), normalW);
 
 	#ifdef LODBASEDMICROSFURACE
@@ -468,16 +439,39 @@ void main(void) {
 	#endif
 #endif
 
-	environmentRadiance *= vLightingIntensity.z;
-	environmentIrradiance *= vLightingIntensity.z;
+// ____________________________________________________________________________________
+// _____________________________ Direct Lighting Param ________________________________
+	// Compute N dot V.
+	float NdotV = clamp(dot(normalW, viewDirectionW),0., 1.) + 0.00001;
 
-	// Specular Environment Fresnel.
-	vec3 specularEnvironmentReflectance = FresnelSchlickEnvironmentGGX(clamp(NdotV, 0., 1.), specularEnvironmentR0, specularEnvironmentR90, sqrt(microSurface));
+	// Compute reflectance.
+	float reflectance = max(max(surfaceReflectivityColor.r, surfaceReflectivityColor.g), surfaceReflectivityColor.b);
+	float reflectance90 = fresnelGrazingReflectance(reflectance);
+	vec3 specularEnvironmentR0 = surfaceReflectivityColor.rgb;
+	vec3 specularEnvironmentR90 = vec3(1.0, 1.0, 1.0) * reflectance90;
 
-	// Compute refractance
-	vec3 refractance = vec3(0.0, 0.0, 0.0);
+	// Environment Reflectance
+	vec3 specularEnvironmentReflectance = fresnelSchlickEnvironmentGGX(clamp(NdotV, 0., 1.), specularEnvironmentR0, specularEnvironmentR90, sqrt(microSurface));
+
+// _____________________________ Direct Lighting Info __________________________________
+	vec3 diffuseBase = vec3(0., 0., 0.);
+#ifdef SPECULARTERM
+	vec3 specularBase = vec3(0., 0., 0.);
+#endif
+
+#ifdef LIGHTMAP
+	vec3 lightmapColor = texture2D(lightmapSampler, vLightmapUV + uvOffset).rgb * vLightmapInfos.y;
+#endif
+
+	lightingInfo info;
+	float shadow = 1.; // 1 - shadowLevel
+	float NdotL = -1.;
+
+#include<lightFragment>[0..maxSimultaneousLights]
 
+// _____________________________ Refractance+Tint ________________________________
 #ifdef REFRACTION
+	vec3 refractance = vec3(0.0, 0.0, 0.0);
 	vec3 transmission = vec3(1.0, 1.0, 1.0);
 	#ifdef LINKREFRACTIONTOTRANSPARENCY
 		// Transmission based on alpha.
@@ -510,66 +504,90 @@ void main(void) {
 	transmission *= 1.0 - specularEnvironmentReflectance;
 
 	// Should baked in diffuse.
-	refractance = surfaceRefractionColor * transmission;
+	refractance = transmission;
 #endif
 
-	// Apply Energy Conservation taking in account the environment level only if the environment is present.
+// ______________________________________________________________________________
+// _____________________________ Energy Conservation  ___________________________
+	// Apply Energy Conservation, taking into account the environment level only if
+	// the environment is present.
 	surfaceAlbedo.rgb = (1. - reflectance) * surfaceAlbedo.rgb;
 
-	refractance *= vLightingIntensity.z;
-	environmentRadiance *= specularEnvironmentReflectance;
+// _____________________________ Diffuse ________________________________________
+	vec3 finalDiffuse = diffuseBase;
+	finalDiffuse.rgb += vAmbientColor;
+	finalDiffuse *= surfaceAlbedo.rgb;
+	finalDiffuse = max(finalDiffuse, 0.0);
 
-	// Emissive
-	vec3 surfaceEmissiveColor = vEmissiveColor;
-#ifdef EMISSIVE
-	vec3 emissiveColorTex = texture2D(emissiveSampler, vEmissiveUV + uvOffset).rgb;
-	surfaceEmissiveColor = toLinearSpace(emissiveColorTex.rgb) * surfaceEmissiveColor;
-	surfaceEmissiveColor *=  vEmissiveInfos.y;
+// _____________________________ Irradiance ______________________________________
+#ifdef REFLECTION
+	vec3 finalIrradiance = environmentIrradiance;
+	finalIrradiance *= surfaceAlbedo.rgb;
 #endif
 
-#ifdef EMISSIVEFRESNEL
-	float emissiveFresnelTerm = computeFresnelTerm(viewDirectionW, normalW, emissiveRightColor.a, emissiveLeftColor.a);
-
-	surfaceEmissiveColor *= emissiveLeftColor.rgb * (1.0 - emissiveFresnelTerm) + emissiveFresnelTerm * emissiveRightColor.rgb;
+// _____________________________ Specular ________________________________________
+#ifdef SPECULARTERM
+	vec3 finalSpecular = specularBase;
+	finalSpecular *= surfaceReflectivityColor;
+	finalSpecular = max(finalSpecular, 0.0);
 #endif
 
-	// Composition
-	vec3 finalDiffuse = lightDiffuseContribution;
-#ifndef EMISSIVEASILLUMINATION
-	finalDiffuse += surfaceEmissiveColor;
+// _____________________________ Radiance ________________________________________
+#ifdef REFLECTION
+	vec3 finalRadiance = environmentRadiance;
+	finalRadiance *= specularEnvironmentReflectance;
 #endif
 
-finalDiffuse.rgb += vAmbientColor;
-finalDiffuse *= surfaceAlbedo.rgb;
-finalDiffuse = max(finalDiffuse, 0.0);
-finalDiffuse = (finalDiffuse * vLightingIntensity.x + surfaceAlbedo.rgb * environmentIrradiance) * ambientOcclusionColor;
+// _____________________________ Refraction ______________________________________
+#ifdef REFRACTION
+	vec3 finalRefraction = surfaceRefractionColor;
+	finalRefraction *= refractance;
+#endif
 
-float luminanceOverAlpha = 0.0;
-#ifdef RADIANCEOVERALPHA
-	luminanceOverAlpha += getLuminance(environmentRadiance);
+// _____________________________ Emissive ________________________________________
+	vec3 finalEmissive = vEmissiveColor;
+#ifdef EMISSIVE
+	vec3 emissiveColorTex = texture2D(emissiveSampler, vEmissiveUV + uvOffset).rgb;
+	finalEmissive *= toLinearSpace(emissiveColorTex.rgb);
+	finalEmissive *=  vEmissiveInfos.y;
 #endif
 
-#ifdef SPECULARTERM
-	vec3 finalSpecular = lightSpecularContribution * surfaceReflectivityColor;
-	#ifdef SPECULAROVERALPHA
+// _____________________________ Highlights on Alpha _____________________________
+#ifdef ALPHABLEND
+	float luminanceOverAlpha = 0.0;
+	#ifdef RADIANCEOVERALPHA
+		luminanceOverAlpha += getLuminance(environmentRadiance);
+	#endif
+
+	#if defined(SPECULARTERM) && defined(SPECULAROVERALPHA)
 		luminanceOverAlpha += getLuminance(finalSpecular);
 	#endif
-#else
-	vec3 finalSpecular = vec3(0.0);
-#endif
-finalSpecular *= vLightingIntensity.x;
 
-#if defined(RADIANCEOVERALPHA) || defined(SPECULAROVERALPHA)
-	alpha = clamp(alpha + luminanceOverAlpha * alpha, 0., 1.);
+	#if defined(RADIANCEOVERALPHA) || defined(SPECULAROVERALPHA)
+		alpha = clamp(alpha + luminanceOverAlpha * alpha, 0., 1.);
+	#endif
 #endif
 
-// Composition
-// Reflection already includes the environment intensity.
-vec4 finalColor = vec4(finalDiffuse + finalSpecular + environmentRadiance + refractance, alpha);
-#ifdef EMISSIVEASILLUMINATION
-	finalColor.rgb += (surfaceEmissiveColor * vLightingIntensity.y);
+// _______________________________________________________________________________
+// _____________________________ Composition _____________________________________
+	// Reflection already includes the environment intensity.
+	vec4 finalColor = vec4(finalDiffuse			* ambientOcclusionColor * vLightingIntensity.x +
+#ifdef REFLECTION
+						finalIrradiance			* ambientOcclusionColor * vLightingIntensity.z +
 #endif
+#ifdef SPECULARTERM
+						finalSpecular			* vLightingIntensity.x * vLightingIntensity.w +
+#endif
+#ifdef REFLECTION
+						finalRadiance			* vLightingIntensity.z +
+#endif
+#ifdef REFRACTION
+						finalRefraction			* vLightingIntensity.z +
+#endif
+						finalEmissive			* vLightingIntensity.y,
+						alpha);
 
+// _____________________________ LightMapping ______________________________________
 #ifdef LIGHTMAP
     #ifndef LIGHTMAPEXCLUDED
         #ifdef USELIGHTMAPASSHADOWMAP
@@ -580,35 +598,24 @@ vec4 finalColor = vec4(finalDiffuse + finalSpecular + environmentRadiance + refr
     #endif
 #endif
 
+// _____________________________ Finally ___________________________________________
 	finalColor = max(finalColor, 0.0);
 
-#ifdef CAMERATONEMAP
-	finalColor.rgb = toneMaps(finalColor.rgb);
-#endif
-
 #include<logDepthFragment>
 #include<fogFragment>(color, finalColor)
 
-#ifdef CAMERACONTRAST
-	finalColor = contrasts(finalColor);
+#ifdef IMAGEPROCESSINGPOSTPROCESS
+	// Sanitize output in case invalid normals or tangents have caused div by 0 or undefined behavior.
+	// This also limits the brightness, which helpfully reduces over-sparkling in bloom (native handles this in the bloom blur shader).
+	finalColor.rgb = clamp(finalColor.rgb, 0., 30.0);
+#else
+	// Always run this to ensure we go back to gamma space.
+	finalColor = applyImageProcessing(finalColor);
 #endif
 
-#ifdef LDROUTPUT
-	finalColor.rgb = toGammaSpace(finalColor.rgb);
-
-	finalColor.rgb = clamp(finalColor.rgb, 0., 1.);
-
-	#ifdef CAMERACOLORGRADING
-		finalColor = colorGrades(finalColor);
-	#endif
-
-	#ifdef CAMERACOLORCURVES
-		finalColor.rgb = applyColorCurves(finalColor.rgb);
-	#endif
-#else
-	//sanitize output incase invalid normals or tangents have caused div by 0 or undefined behavior
-	//this also limits the brightness which helpfully reduces over-sparkling in bloom (native handles this in the bloom blur shader)
-	finalColor.rgb = clamp(finalColor.rgb, 0., 30.0);
+#ifdef PREMULTIPLYALPHA
+	// Convert to associated (premultiplied) alpha if needed.
+	finalColor.rgb *= finalColor.a;
 #endif
 
 	gl_FragColor = finalColor;
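
For reference, the ALPHAFRESNEL conversion introduced above boils down to a Schlick-style curve on a squared opacity; a minimal TypeScript sketch (illustrative, not library code):

    const clamp01 = (v: number) => Math.min(Math.max(v, 0.0), 1.0);

    // Perceptual (gamma-encoded) opacity -> view-dependent linear opacity.
    function directionalAlpha(perceptualAlpha: number, NdotV: number, microSurface: number): number {
        const opacity0 = perceptualAlpha * perceptualAlpha;           // ~linear absorptance
        const opacity90 = clamp01(opacity0 * 25.0);                   // fresnelGrazingReflectance
        const weight = 0.25 + (1.0 - 0.25) * Math.sqrt(microSurface); // mix(FRESNEL_MAXIMUM_ON_ROUGH, 1, smoothness)
        return opacity0 + weight * (opacity90 - opacity0) * Math.pow(1.0 - clamp01(NdotV), 5.0);
    }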

+ 21 - 0
src/Tools/babylon.decorators.ts

@@ -68,6 +68,15 @@
         return generateSerializableMember(7, sourceName); // color curves
     }
 
+    export function serializeAsColor4(sourceName?: string) {
+        return generateSerializableMember(8, sourceName); // color 4
+    }
+
+    export function serializeAsImageProcessingConfiguration(sourceName?: string) {
+        return generateSerializableMember(9, sourceName); // image processing
+    }
+
+
     export class SerializationHelper {
 
         public static Serialize<T>(entity: T, serializationObject?: any): any {
@@ -113,6 +122,12 @@
                         case 7:     // Color Curves
                             serializationObject[targetPropertyName] = sourceProperty.serialize();
                             break;
+                        case 8:     // Color 4
+                            serializationObject[targetPropertyName] = (<Color4>sourceProperty).asArray();
+                            break;
+                        case 9:     // Image Processing
+                            serializationObject[targetPropertyName] = (<ImageProcessingConfiguration>sourceProperty).serialize();
+                            break;
                     }
                 }
             }
@@ -160,6 +175,12 @@
                         case 7:     // Color Curves
                             destination[property] = ColorCurves.Parse(sourceProperty);
                             break;
+                        case 8:     // Color 4
+                            destination[property] = Color4.FromArray(sourceProperty);
+                            break;
+                        case 9:     // Image Processing
+                            destination[property] = ImageProcessingConfiguration.Parse(sourceProperty);
+                            break;
                     }
                 }
             }
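
A hedged usage sketch for the two new serialization cases. The class and member names below are hypothetical; only the decorators, SerializationHelper, Color4 and ImageProcessingConfiguration come from the library (BABYLON namespace context assumed):

    // Hypothetical settings class decorated with the new Color4 and image processing members.
    class MySettings {
        @serializeAsColor4("tint")
        public tint = new Color4(1, 1, 1, 1);

        @serializeAsImageProcessingConfiguration("imageProcessing")
        public imageProcessing = new ImageProcessingConfiguration();
    }

    // Serializes the decorated members, exercising cases 8 and 9 above.
    const serialized = SerializationHelper.Serialize(new MySettings());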

+ 16 - 0
src/babylon.scene.ts

@@ -209,6 +209,19 @@
             this.markAllMaterialsAsDirty(Material.TextureDirtyFlag);
         }
 
+        protected _imageProcessingConfiguration: ImageProcessingConfiguration;
+        /**
+         * Default image processing configuration used either in the rendering
+         * of the forward main pass or through the imageProcessingPostProcess if present.
+         * As they are the same in the majority of scenes (except for multi-camera setups),
+         * it is easier to reference it from here than from all the materials and post processes.
+         * 
+         * No setter as it is a shared configuration; set its values instead.
+         */
+        public get imageProcessingConfiguration(): ImageProcessingConfiguration {
+            return this._imageProcessingConfiguration;
+        }
+
         public forceWireframe = false;
         private _forcePointsCloud = false;
         public set forcePointsCloud(value : boolean) {
@@ -792,6 +805,9 @@
 
             // Uniform Buffer
             this._createUbo();
+
+            // Default Image processing definition.
+            this._imageProcessingConfiguration = new ImageProcessingConfiguration();
         }
 
         // Properties
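
A hedged usage sketch of the shared configuration created in the constructor above; exposure and contrast are among the properties this pull request adds to ImageProcessingConfiguration (engine setup elided, BABYLON namespace context assumed):

    // Assumes an existing Engine instance.
    declare const engine: Engine;

    const scene = new Scene(engine);
    // No setter on the scene property; mutate the shared configuration instead.
    scene.imageProcessingConfiguration.exposure = 1.25; // linear exposure multiplier
    scene.imageProcessingConfiguration.contrast = 1.5;  // values above 1 increase contrast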