David Catuhe 9 年之前
父节点
当前提交
4943f93a6c

+ 2 - 1
Tools/Gulp/config.json

@@ -234,7 +234,8 @@
       "../../src/Materials/Textures/babylon.colorGradingTexture.js",
       "../../src/Materials/babylon.colorCurves.js",
       "../../src/Materials/babylon.pbrMaterial.js",      
-      "../../src/Debug/babylon.debugLayer.js"
+      "../../src/Debug/babylon.debugLayer.js",
+      "../../src/PostProcess/babylon.standardRenderingPipeline.js"
     ]
   }
 }

文件差异内容过多而无法显示
+ 24 - 24
dist/preview release/babylon.core.js


文件差异内容过多而无法显示
+ 4564 - 4518
dist/preview release/babylon.d.ts


文件差异内容过多而无法显示
+ 41 - 41
dist/preview release/babylon.js


文件差异内容过多而无法显示
+ 380 - 58
dist/preview release/babylon.max.js


文件差异内容过多而无法显示
+ 42 - 42
dist/preview release/babylon.noworker.js


+ 30 - 40
src/Cameras/VR/babylon.webVRCamera.js

@@ -7,29 +7,28 @@ var BABYLON;
 (function (BABYLON) {
     var WebVRFreeCamera = (function (_super) {
         __extends(WebVRFreeCamera, _super);
-        function WebVRFreeCamera(name, position, scene, compensateDistortion, vrCameraMetrics, webVROptions) {
+        function WebVRFreeCamera(name, position, scene, compensateDistortion, webVROptions) {
             var _this = this;
-            if (compensateDistortion === void 0) { compensateDistortion = true; }
-            if (vrCameraMetrics === void 0) { vrCameraMetrics = BABYLON.VRCameraMetrics.GetDefault(); }
+            if (compensateDistortion === void 0) { compensateDistortion = false; }
             if (webVROptions === void 0) { webVROptions = {}; }
             _super.call(this, name, position, scene);
             this.webVROptions = webVROptions;
             this._vrDevice = null;
             this._cacheState = null;
             this._vrEnabled = false;
-            vrCameraMetrics.compensateDistortion = compensateDistortion;
-            this.setCameraRigMode(BABYLON.Camera.RIG_MODE_VR, { vrCameraMetrics: vrCameraMetrics });
-            //this._getWebVRDevices = this._getWebVRDevices.bind(this);
+            //enable VR
+            this.getEngine().initWebVR();
             if (!this.getEngine().vrDisplaysPromise) {
                 BABYLON.Tools.Error("WebVR is not enabled on your browser");
             }
             else {
                 //TODO get the metrics updated using the device's eye parameters!
+                //TODO also check that the device has the right capabilities!
                 this.getEngine().vrDisplaysPromise.then(function (devices) {
                     if (devices.length > 0) {
                         _this._vrEnabled = true;
                         if (_this.webVROptions.displayName) {
-                            devices.some(function (device) {
+                            var found = devices.some(function (device) {
                                 if (device.displayName === _this.webVROptions.displayName) {
                                     _this._vrDevice = device;
                                     return true;
@@ -38,11 +37,17 @@ var BABYLON;
                                     return false;
                                 }
                             });
+                            if (!found) {
+                                _this._vrDevice = devices[0];
+                                BABYLON.Tools.Warn("Display " + _this.webVROptions.displayName + " was not found. Using " + _this._vrDevice.displayName);
+                            }
                         }
                         else {
                             //choose the first one
                             _this._vrDevice = devices[0];
                         }
+                        //reset the rig parameters.
+                        _this.setCameraRigMode(BABYLON.Camera.RIG_MODE_WEBVR, { vrDisplay: _this._vrDevice });
                     }
                     else {
                         BABYLON.Tools.Error("No WebVR devices found!");
@@ -54,18 +59,18 @@ var BABYLON;
         }
         WebVRFreeCamera.prototype._checkInputs = function () {
             if (this._vrEnabled) {
-                this._cacheState = this._vrDevice.getPose();
-                this.rotationQuaternion.copyFromFloats(this._cacheState.orientation[0], this._cacheState.orientation[1], this._cacheState.orientation[2], this._cacheState.orientation[3]);
-                if (this.webVROptions.trackPosition) {
-                    this.position.copyFromFloats(this._cacheState.position[0], this._cacheState.position[1], -this._cacheState.position[2]);
-                    this.webVROptions.positionScale && this.position.scaleInPlace(this.webVROptions.positionScale);
-                }
-                //Flip in XY plane
-                this.rotationQuaternion.z *= -1;
-                this.rotationQuaternion.w *= -1;
-                if (this._initialQuaternion) {
-                    this._quaternionCache.copyFrom(this.rotationQuaternion);
-                    this._initialQuaternion.multiplyToRef(this.rotationQuaternion, this.rotationQuaternion);
+                var currentPost = this._vrDevice.getPose();
+                //make sure we have data
+                if (currentPost && currentPost.orientation) {
+                    this._cacheState = currentPost;
+                    this.rotationQuaternion.copyFromFloats(this._cacheState.orientation[0], this._cacheState.orientation[1], this._cacheState.orientation[2], this._cacheState.orientation[3]);
+                    if (this.webVROptions.trackPosition && this._cacheState.position) {
+                        this.position.copyFromFloats(this._cacheState.position[0], this._cacheState.position[1], -this._cacheState.position[2]);
+                        this.webVROptions.positionScale && this.position.scaleInPlace(this.webVROptions.positionScale);
+                    }
+                    //Flip in XY plane
+                    this.rotationQuaternion.z *= -1;
+                    this.rotationQuaternion.w *= -1;
                 }
             }
             _super.prototype._checkInputs.call(this);
@@ -84,31 +89,16 @@ var BABYLON;
         };
         WebVRFreeCamera.prototype.requestVRFullscreen = function (requestPointerlock) {
             //Backwards comp.
-            BABYLON.Tools.Warn("requestVRFullscreen is deprecated. Use engine.switchFullscreen() instead");
-            this.getEngine().switchFullscreen(requestPointerlock);
+            BABYLON.Tools.Warn("requestVRFullscreen is deprecated. call attachControl() to start sending frames to the VR display.");
+            //this.getEngine().switchFullscreen(requestPointerlock);
         };
         WebVRFreeCamera.prototype.getTypeName = function () {
             return "WebVRFreeCamera";
         };
-        WebVRFreeCamera.prototype.resetToCurrentRotation = function (axis) {
-            var _this = this;
-            if (axis === void 0) { axis = BABYLON.Axis.Y; }
-            //can only work if this camera has a rotation quaternion already.
-            if (!this.rotationQuaternion)
-                return;
-            if (!this._initialQuaternion) {
-                this._initialQuaternion = new BABYLON.Quaternion();
-            }
-            this._initialQuaternion.copyFrom(this._quaternionCache || this.rotationQuaternion);
-            ['x', 'y', 'z'].forEach(function (axisName) {
-                if (!axis[axisName]) {
-                    _this._initialQuaternion[axisName] = 0;
-                }
-                else {
-                    _this._initialQuaternion[axisName] *= -1;
-                }
-            });
-            this._initialQuaternion.normalize();
+        WebVRFreeCamera.prototype.resetToCurrentRotation = function () {
+            //uses the vrDisplay's "resetPose()".
+            //pitch and roll won't be affected.
+            this._vrDevice.resetPose();
         };
         return WebVRFreeCamera;
     }(BABYLON.FreeCamera));

+ 29 - 0
src/Cameras/babylon.camera.js

@@ -116,6 +116,13 @@ var BABYLON;
             enumerable: true,
             configurable: true
         });
+        Object.defineProperty(Camera, "RIG_MODE_WEBVR", {
+            get: function () {
+                return Camera._RIG_MODE_WEBVR;
+            },
+            enumerable: true,
+            configurable: true
+        });
         /**
          * @param {boolean} fullDetails - support for multiple levels of logging within scene loading
          */
@@ -439,6 +446,22 @@ var BABYLON;
                         this._rigCameras[1]._rigPostProcess = new BABYLON.VRDistortionCorrectionPostProcess("VR_Distort_Compensation_Right", this._rigCameras[1], true, metrics);
                     }
                     break;
+                case Camera.RIG_MODE_WEBVR:
+                    if (rigParams.vrDisplay) {
+                        var leftEye = rigParams.vrDisplay.getEyeParameters('left');
+                        var rightEye = rigParams.vrDisplay.getEyeParameters('right');
+                        this._rigCameras[0].viewport = new BABYLON.Viewport(0, 0, 0.5, 1.0);
+                        this._rigCameras[0].setCameraRigParameter("vrFieldOfView", leftEye.fieldOfView);
+                        this._rigCameras[0].setCameraRigParameter("vrOffsetMatrix", BABYLON.Matrix.Translation(-leftEye.offset[0], leftEye.offset[1], -leftEye.offset[2]));
+                        this._rigCameras[0]._cameraRigParams.vrWorkMatrix = new BABYLON.Matrix();
+                        this._rigCameras[0].getProjectionMatrix = this._getWebVRProjectionMatrix;
+                        this._rigCameras[1].viewport = new BABYLON.Viewport(0.5, 0, 0.5, 1.0);
+                        this._rigCameras[1].setCameraRigParameter("vrFieldOfView", rightEye.fieldOfView);
+                        this._rigCameras[1].setCameraRigParameter("vrOffsetMatrix", BABYLON.Matrix.Translation(-rightEye.offset[0], rightEye.offset[1], -rightEye.offset[2]));
+                        this._rigCameras[1]._cameraRigParams.vrWorkMatrix = new BABYLON.Matrix();
+                        this._rigCameras[1].getProjectionMatrix = this._getWebVRProjectionMatrix;
+                    }
+                    break;
             }
             this._cascadePostProcessesToRigCams();
             this.
@@ -449,6 +472,11 @@ var BABYLON;
             this._cameraRigParams.vrWorkMatrix.multiplyToRef(this._cameraRigParams.vrHMatrix, this._projectionMatrix);
             return this._projectionMatrix;
         };
+        Camera.prototype._getWebVRProjectionMatrix = function () {
+            BABYLON.Matrix.PerspectiveFovWebVRToRef(this._cameraRigParams['vrFieldOfView'], this.minZ, this.maxZ, this._cameraRigParams.vrWorkMatrix);
+            this._cameraRigParams.vrWorkMatrix.multiplyToRef(this._cameraRigParams['vrOffsetMatrix'], this._projectionMatrix);
+            return this._projectionMatrix;
+        };
         Camera.prototype.setCameraRigParameter = function (name, value) {
             if (!this._cameraRigParams) {
                 this._cameraRigParams = {};
@@ -595,6 +623,7 @@ var BABYLON;
         Camera._RIG_MODE_STEREOSCOPIC_SIDEBYSIDE_CROSSEYED = 12;
         Camera._RIG_MODE_STEREOSCOPIC_OVERUNDER = 13;
         Camera._RIG_MODE_VR = 20;
+        Camera._RIG_MODE_WEBVR = 21;
         Camera.ForceAttachControlToAlwaysPreventDefault = false;
         __decorate([
             BABYLON.serializeAsVector3()

+ 2 - 1
src/Cameras/babylon.targetCamera.js

@@ -195,7 +195,7 @@ var BABYLON;
         TargetCamera.prototype.createRigCamera = function (name, cameraIndex) {
             if (this.cameraRigMode !== BABYLON.Camera.RIG_MODE_NONE) {
                 var rigCamera = new TargetCamera(name, this.position.clone(), this.getScene());
-                if (this.cameraRigMode === BABYLON.Camera.RIG_MODE_VR) {
+                if (this.cameraRigMode === BABYLON.Camera.RIG_MODE_VR || this.cameraRigMode === BABYLON.Camera.RIG_MODE_WEBVR) {
                     if (!this.rotationQuaternion) {
                         this.rotationQuaternion = new BABYLON.Quaternion();
                     }
@@ -227,6 +227,7 @@ var BABYLON;
                     camRight.setTarget(this.getTarget());
                     break;
                 case BABYLON.Camera.RIG_MODE_VR:
+                case BABYLON.Camera.RIG_MODE_WEBVR:
                     camLeft.rotationQuaternion.copyFrom(this.rotationQuaternion);
                     camRight.rotationQuaternion.copyFrom(this.rotationQuaternion);
                     camLeft.position.copyFrom(this.position);

+ 7 - 1
src/Canvas2d/babylon.canvas2d.js

@@ -1127,7 +1127,7 @@ var BABYLON;
                     mapSize.width = Math.pow(2, Math.ceil(Math.log(size.width) / Math.log(2)));
                     mapSize.height = Math.pow(2, Math.ceil(Math.log(size.height) / Math.log(2)));
                 }
-                var id = "groupsMapChache" + this._mapCounter + "forCanvas" + this.id;
+                var id = "groupsMapChache" + this._mapCounter++ + "forCanvas" + this.id;
                 map = new BABYLON.MapTexture(id, this._scene, mapSize, useMipMap ? BABYLON.Texture.TRILINEAR_SAMPLINGMODE : BABYLON.Texture.BILINEAR_SAMPLINGMODE, useMipMap);
                 map.hasAlpha = true;
                 map.anisotropicFilteringLevel = 4;
@@ -1149,6 +1149,12 @@ var BABYLON;
                 else {
                     var sprite = new BABYLON.Sprite2D(map, { parent: parent, id: "__cachedSpriteOfGroup__" + group.id, x: group.actualPosition.x, y: group.actualPosition.y, spriteSize: node.contentSize, spriteLocation: node.pos });
                     sprite.origin = group.origin.clone();
+                    sprite.addExternalData("__cachedGroup__", group);
+                    sprite.pointerEventObservable.add(function (e, s) {
+                        if (group.pointerEventObservable !== null) {
+                            group.pointerEventObservable.notifyObservers(e, s.mask);
+                        }
+                    });
                     res.sprite = sprite;
                 }
             }

+ 14 - 7
src/Canvas2d/babylon.group2d.js

@@ -64,7 +64,7 @@ var BABYLON;
             this._viewportPosition = BABYLON.Vector2.Zero();
         }
         Group2D._createCachedCanvasGroup = function (owner) {
-            var g = new Group2D({ parent: owner, id: "__cachedCanvasGroup__", position: BABYLON.Vector2.Zero(), origin: BABYLON.Vector2.Zero(), size: null, isVisible: true });
+            var g = new Group2D({ parent: owner, id: "__cachedCanvasGroup__", position: BABYLON.Vector2.Zero(), origin: BABYLON.Vector2.Zero(), size: null, isVisible: true, isPickable: false });
             return g;
         };
         Group2D.prototype.applyCachedTexture = function (vertexData, material) {
@@ -699,25 +699,32 @@ var BABYLON;
         Group2D.prototype.handleGroupChanged = function (prop) {
             // This method is only for cachedGroup
             var rd = this._renderableData;
-            if (!this.isCachedGroup || !rd._cacheRenderSprite) {
+            if (!rd) {
+                return;
+            }
+            var cachedSprite = rd._cacheRenderSprite;
+            if (!this.isCachedGroup || !cachedSprite) {
                 return;
             }
             // For now we only support these property changes
             // TODO: add more! :)
             if (prop.id === BABYLON.Prim2DBase.actualPositionProperty.id) {
-                rd._cacheRenderSprite.actualPosition = this.actualPosition.clone();
+                cachedSprite.actualPosition = this.actualPosition.clone();
+                if (cachedSprite.position != null) {
+                    cachedSprite.position = cachedSprite.actualPosition.clone();
+                }
             }
             else if (prop.id === BABYLON.Prim2DBase.rotationProperty.id) {
-                rd._cacheRenderSprite.rotation = this.rotation;
+                cachedSprite.rotation = this.rotation;
             }
             else if (prop.id === BABYLON.Prim2DBase.scaleProperty.id) {
-                rd._cacheRenderSprite.scale = this.scale;
+                cachedSprite.scale = this.scale;
             }
             else if (prop.id === BABYLON.Prim2DBase.originProperty.id) {
-                rd._cacheRenderSprite.origin = this.origin.clone();
+                cachedSprite.origin = this.origin.clone();
             }
             else if (prop.id === Group2D.actualSizeProperty.id) {
-                rd._cacheRenderSprite.size = this.actualSize.clone();
+                cachedSprite.size = this.actualSize.clone();
             }
         };
         Group2D.prototype.detectGroupStates = function () {

+ 18 - 0
src/Canvas2d/babylon.prim2dBase.js

@@ -2012,6 +2012,7 @@ var BABYLON;
              * Get the global transformation matrix of the primitive
              */
             get: function () {
+                this._updateLocalTransform();
                 return this._globalTransform;
             },
             enumerable: true,
@@ -2038,6 +2039,7 @@ var BABYLON;
              * Get invert of the global transformation matrix of the primitive
              */
             get: function () {
+                this._updateLocalTransform();
                 return this._invGlobalTransform;
             },
             enumerable: true,
@@ -2193,6 +2195,22 @@ var BABYLON;
             if (!intersectInfo.intersectHidden && !this.isVisible) {
                 return false;
             }
+            var id = this.id;
+            if (id != null && id.indexOf("__cachedSpriteOfGroup__") === 0) {
+                var ownerGroup = this.getExternalData("__cachedGroup__");
+                return ownerGroup.intersect(intersectInfo);
+            }
+            // If we're testing a cachedGroup, we must reject pointer outside its levelBoundingInfo because children primitives could be partially clipped outside so we must not accept them as intersected when it's the case (because they're not visually visible).
+            var isIntersectionTest = false;
+            if (this instanceof BABYLON.Group2D) {
+                var g = this;
+                isIntersectionTest = g.isCachedGroup;
+            }
+            if (isIntersectionTest && !this.levelBoundingInfo.doesIntersect(intersectInfo._localPickPosition)) {
+                // Important to call this before each return to allow a good recursion next time this intersectInfo is reused
+                intersectInfo._exit(firstLevel);
+                return false;
+            }
             // Fast rejection test with boundingInfo
             if (this.isPickable && !this.boundingInfo.doesIntersect(intersectInfo._localPickPosition)) {
                 // Important to call this before each return to allow a good recursion next time this intersectInfo is reused

+ 1 - 1
src/Canvas2d/babylon.renderablePrim2d.js

@@ -401,7 +401,7 @@ var BABYLON;
                     var td = gii.transparentData[0];
                     usedCount += td._partData.usedElementCount;
                 }
-                if (usedCount === 0) {
+                if (usedCount === 0 && gii.modelRenderCache != null) {
                     this.renderGroup._renderableData._renderGroupInstancesInfo.remove(gii.modelRenderCache.modelKey);
                     gii.dispose();
                 }

+ 1 - 1
src/Canvas2d/babylon.sprite2d.js

@@ -214,7 +214,7 @@ var BABYLON;
                             _this.size = texture.getBaseSize();
                         }
                         _this._positioningDirty();
-                        _this._instanceDirtyFlags |= Sprite2D.textureProperty.flagId; // To make sure the sprite is issued again for render
+                        _this._instanceDirtyFlags |= BABYLON.Prim2DBase.originProperty.flagId | Sprite2D.textureProperty.flagId; // To make sure the sprite is issued again for render
                     });
                 }
             }

+ 21 - 0
src/Math/babylon.math.js

@@ -2328,6 +2328,27 @@ var BABYLON;
             result.m[12] = result.m[13] = result.m[15] = 0.0;
             result.m[14] = (znear * zfar) / (znear - zfar);
         };
+        Matrix.PerspectiveFovWebVRToRef = function (fov, znear, zfar, result, isVerticalFovFixed) {
+            if (isVerticalFovFixed === void 0) { isVerticalFovFixed = true; }
+            var upTan = Math.tan(fov.upDegrees * Math.PI / 180.0);
+            var downTan = Math.tan(fov.downDegrees * Math.PI / 180.0);
+            var leftTan = Math.tan(fov.leftDegrees * Math.PI / 180.0);
+            var rightTan = Math.tan(fov.rightDegrees * Math.PI / 180.0);
+            var xScale = 2.0 / (leftTan + rightTan);
+            var yScale = 2.0 / (upTan + downTan);
+            result.m[0] = xScale;
+            result.m[1] = result.m[2] = result.m[3] = result.m[4] = 0.0;
+            result.m[5] = yScale;
+            result.m[6] = result.m[7] = 0.0;
+            result.m[8] = ((leftTan - rightTan) * xScale * 0.5);
+            result.m[9] = -((upTan - downTan) * yScale * 0.5);
+            //result.m[10] = -(znear + zfar) / (zfar - znear);
+            result.m[10] = -zfar / (znear - zfar);
+            result.m[11] = 1.0;
+            result.m[12] = result.m[13] = result.m[15] = 0.0;
+            //result.m[14] = -(2.0 * zfar * znear) / (zfar - znear);
+            result.m[14] = (znear * zfar) / (znear - zfar);
+        };
         Matrix.GetFinalMatrix = function (viewport, world, view, projection, zmin, zmax) {
             var cw = viewport.width;
             var ch = viewport.height;

+ 234 - 0
src/PostProcess/babylon.standardRenderingPipeline.js

@@ -0,0 +1,234 @@
+var __extends = (this && this.__extends) || function (d, b) {
+    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
+    function __() { this.constructor = d; }
+    d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+};
+var BABYLON;
+(function (BABYLON) {
+    var StandardRenderingPipeline = (function (_super) {
+        __extends(StandardRenderingPipeline, _super);
+        /**
+         * @constructor
+         * @param {string} name - The rendering pipeline name
+         * @param {BABYLON.Scene} scene - The scene linked to this pipeline
+         * @param {any} ratio - The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5)
+         * @param {BABYLON.PostProcess} originalPostProcess - the custom original color post-process. Must be "reusable". Can be null.
+         * @param {BABYLON.Camera[]} cameras - The array of cameras that the rendering pipeline will be attached to
+         */
+        function StandardRenderingPipeline(name, scene, ratio, originalPostProcess, cameras) {
+            var _this = this;
+            if (originalPostProcess === void 0) { originalPostProcess = null; }
+            _super.call(this, scene.getEngine(), name);
+            this.downSampleX4PostProcess = null;
+            this.brightPassPostProcess = null;
+            this.gaussianBlurHPostProcesses = [];
+            this.gaussianBlurVPostProcesses = [];
+            this.textureAdderPostProcess = null;
+            this.depthOfFieldSourcePostProcess = null;
+            this.depthOfFieldPostProcess = null;
+            this.brightThreshold = 1.0;
+            this.gaussianCoefficient = 0.25;
+            this.gaussianMean = 1.0;
+            this.gaussianStandardDeviation = 1.0;
+            this.exposure = 1.0;
+            this.lensTexture = null;
+            this.depthOfFieldDistance = 10.0;
+            this._depthRenderer = null;
+            // Getters and setters
+            this._blurEnabled = true;
+            this._depthOfFieldEnabled = false;
+            // Initialize
+            this._scene = scene;
+            // Create pass post-processe
+            if (!originalPostProcess) {
+                this.originalPostProcess = new BABYLON.PostProcess("HDRPass", "standard", [], [], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), true, "#define PASS_POST_PROCESS", BABYLON.Engine.TEXTURETYPE_FLOAT);
+            }
+            else {
+                this.originalPostProcess = originalPostProcess;
+            }
+            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", function () { return _this.originalPostProcess; }, true));
+            // Create down sample X4 post-process
+            this._createDownSampleX4PostProcess(scene, ratio / 2);
+            // Create bright pass post-process
+            this._createBrightPassPostProcess(scene, ratio / 2);
+            // Create gaussian blur post-processes (down sampling blurs)
+            this._createGaussianBlurPostProcesses(scene, ratio / 2, 0);
+            this._createGaussianBlurPostProcesses(scene, ratio / 4, 1);
+            this._createGaussianBlurPostProcesses(scene, ratio / 8, 2);
+            this._createGaussianBlurPostProcesses(scene, ratio / 16, 3);
+            // Create texture adder post-process
+            this._createTextureAdderPostProcess(scene, ratio);
+            // Create depth-of-field source post-process
+            this.depthOfFieldSourcePostProcess = new BABYLON.PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), true, "#define PASS_POST_PROCESS", BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT);
+            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfFieldSource", function () { return _this.depthOfFieldSourcePostProcess; }, true));
+            // Create gaussian blur used by depth-of-field
+            this._createGaussianBlurPostProcesses(scene, ratio / 2, 4);
+            // Create depth-of-field post-process
+            this._createDepthOfFieldPostProcess(scene, ratio);
+            // Finish
+            scene.postProcessRenderPipelineManager.addPipeline(this);
+            if (cameras !== null) {
+                scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(name, cameras);
+            }
+            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfFieldSource", cameras);
+            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurH4", cameras);
+            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurV4", cameras);
+            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfField", cameras);
+        }
+        Object.defineProperty(StandardRenderingPipeline.prototype, "BlurEnabled", {
+            get: function () {
+                return this._blurEnabled;
+            },
+            set: function (enabled) {
+                if (enabled && !this._blurEnabled || !enabled && this._blurEnabled) {
+                    for (var i = 0; i < this.gaussianBlurHPostProcesses.length - 1; i++) {
+                        if (enabled) {
+                            this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurH" + i, this._scene.cameras);
+                            this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurV" + i, this._scene.cameras);
+                        }
+                        else {
+                            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurH" + i, this._scene.cameras);
+                            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurV" + i, this._scene.cameras);
+                        }
+                    }
+                }
+                this._blurEnabled = enabled;
+            },
+            enumerable: true,
+            configurable: true
+        });
+        Object.defineProperty(StandardRenderingPipeline.prototype, "DepthOfFieldEnabled", {
+            get: function () {
+                return this._depthOfFieldEnabled;
+            },
+            set: function (enabled) {
+                if (enabled && !this._depthOfFieldEnabled) {
+                    this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRDepthOfFieldSource", this._scene.cameras);
+                    this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurH4", this._scene.cameras);
+                    this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurV4", this._scene.cameras);
+                    this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRDepthOfField", this._scene.cameras);
+                    this._depthRenderer = this._scene.enableDepthRenderer();
+                }
+                else if (!enabled && this._depthOfFieldEnabled) {
+                    this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfFieldSource", this._scene.cameras);
+                    this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurH4", this._scene.cameras);
+                    this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurV4", this._scene.cameras);
+                    this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfField", this._scene.cameras);
+                }
+                this._depthOfFieldEnabled = enabled;
+            },
+            enumerable: true,
+            configurable: true
+        });
+        // Down Sample X4 Post-Processs
+        StandardRenderingPipeline.prototype._createDownSampleX4PostProcess = function (scene, ratio) {
+            var _this = this;
+            var downSampleX4Offsets = new Array(32);
+            this.downSampleX4PostProcess = new BABYLON.PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT);
+            this.downSampleX4PostProcess.onApply = function (effect) {
+                var id = 0;
+                for (var i = -2; i < 2; i++) {
+                    for (var j = -2; j < 2; j++) {
+                        downSampleX4Offsets[id] = (i + 0.5) * (1.0 / _this.downSampleX4PostProcess.width);
+                        downSampleX4Offsets[id + 1] = (j + 0.5) * (1.0 / _this.downSampleX4PostProcess.height);
+                        id += 2;
+                    }
+                }
+                effect.setArray2("dsOffsets", downSampleX4Offsets);
+            };
+            // Add to pipeline
+            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRDownSampleX4", function () { return _this.downSampleX4PostProcess; }, true));
+        };
+        // Brightpass Post-Process
+        StandardRenderingPipeline.prototype._createBrightPassPostProcess = function (scene, ratio) {
+            var _this = this;
+            var brightOffsets = new Array(8);
+            this.brightPassPostProcess = new BABYLON.PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT);
+            this.brightPassPostProcess.onApply = function (effect) {
+                var sU = (1.0 / _this.brightPassPostProcess.width);
+                var sV = (1.0 / _this.brightPassPostProcess.height);
+                brightOffsets[0] = -0.5 * sU;
+                brightOffsets[1] = 0.5 * sV;
+                brightOffsets[2] = 0.5 * sU;
+                brightOffsets[3] = 0.5 * sV;
+                brightOffsets[4] = -0.5 * sU;
+                brightOffsets[5] = -0.5 * sV;
+                brightOffsets[6] = 0.5 * sU;
+                brightOffsets[7] = -0.5 * sV;
+                effect.setArray2("dsOffsets", brightOffsets);
+                effect.setFloat("brightThreshold", _this.brightThreshold);
+            };
+            // Add to pipeline
+            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRBrightPass", function () { return _this.brightPassPostProcess; }, true));
+        };
+        // Create gaussian blur H&V post-processes
        StandardRenderingPipeline.prototype._createGaussianBlurPostProcesses = function (scene, ratio, indice) {
            var _this = this;
            // 9-tap separable blur. Offsets and weights are recomputed on every apply
            // so live changes to the gaussian* properties take effect immediately.
            var blurOffsets = new Array(9);
            var blurWeights = new Array(9);
            var uniforms = ["blurOffsets", "blurWeights"];
            // Factory producing the onApply handler; `height` selects the vertical
            // (true) or horizontal (false) texel dimension for the offsets.
            var callback = function (height) {
                return function (effect) {
                    // Weights: gaussian evaluated at x in [-1, 1] in steps of 1/4.
                    var x = 0.0;
                    for (var i = 0; i < 9; i++) {
                        x = (i - 4.0) / 4.0;
                        blurWeights[i] =
                            _this.gaussianCoefficient
                                * (1.0 / Math.sqrt(2.0 * Math.PI * _this.gaussianStandardDeviation))
                                * Math.exp((-((x - _this.gaussianMean) * (x - _this.gaussianMean))) / (2.0 * _this.gaussianStandardDeviation * _this.gaussianStandardDeviation));
                    }
                    // NOTE(review): offsets are derived from the engine's current render
                    // size, not from this post-process's own width/height — confirm intended.
                    var lastOutputDimensions = {
                        width: scene.getEngine().getRenderWidth(),
                        height: scene.getEngine().getRenderHeight()
                    };
                    // Offsets: -4..+4 texels along the chosen axis.
                    for (var i = 0; i < 9; i++) {
                        var value = (i - 4.0) * (1.0 / (height === true ? lastOutputDimensions.height : lastOutputDimensions.width));
                        blurOffsets[i] = value;
                    }
                    effect.setArray("blurOffsets", blurOffsets);
                    effect.setArray("blurWeights", blurWeights);
                };
            };
            // Create horizontal gaussian blur post-processes
            var gaussianBlurHPostProcess = new BABYLON.PostProcess("HDRGaussianBlurH" + ratio, "standard", uniforms, [], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define GAUSSIAN_BLUR_H", BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT);
            gaussianBlurHPostProcess.onApply = callback(false);
            // Create vertical gaussian blur post-process
            var gaussianBlurVPostProcess = new BABYLON.PostProcess("HDRGaussianBlurV" + ratio, "standard", uniforms, [], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define GAUSSIAN_BLUR_V", BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT);
            gaussianBlurVPostProcess.onApply = callback(true);
            // Add to pipeline. Note: the effect names use `indice` while the
            // post-process names above use `ratio`.
            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRGaussianBlurH" + indice, function () { return gaussianBlurHPostProcess; }, true));
            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRGaussianBlurV" + indice, function () { return gaussianBlurVPostProcess; }, true));
            // Keep references so the BlurEnabled toggle can find them later.
            this.gaussianBlurHPostProcesses.push(gaussianBlurHPostProcess);
            this.gaussianBlurVPostProcesses.push(gaussianBlurVPostProcess);
        };
+        // Create texture adder post-process
+        StandardRenderingPipeline.prototype._createTextureAdderPostProcess = function (scene, ratio) {
+            var _this = this;
+            var lastGaussianBlurPostProcess = this.gaussianBlurVPostProcesses[3];
+            this.textureAdderPostProcess = new BABYLON.PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), true, "#define TEXTURE_ADDER", BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT);
+            this.textureAdderPostProcess.onApply = function (effect) {
+                effect.setTextureFromPostProcess("otherSampler", _this.originalPostProcess);
+                effect.setTexture("lensSampler", _this.lensTexture);
+                effect.setFloat("exposure", _this.exposure);
+            };
+            // Add to pipeline
+            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", function () { return _this.textureAdderPostProcess; }, true));
+        };
+        // Create depth-of-field post-process
        StandardRenderingPipeline.prototype._createDepthOfFieldPostProcess = function (scene, ratio) {
            var _this = this;
            // Mixes the sharp image ("otherSampler", captured by the HDRDepthOfFieldSource
            // pass) with the blurred chain output, driven by the scene depth map.
            this.depthOfFieldPostProcess = new BABYLON.PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", BABYLON.Engine.TEXTURETYPE_UNSIGNED_INT);
            this.depthOfFieldPostProcess.onApply = function (effect) {
                effect.setTextureFromPostProcess("otherSampler", _this.depthOfFieldSourcePostProcess);
                // NOTE(review): _depthRenderer is only assigned when DepthOfFieldEnabled
                // is switched on; this handler presumably never runs while the effect is
                // disabled — confirm, otherwise this is a null dereference.
                effect.setTexture("depthSampler", _this._depthRenderer.getDepthMap());
                effect.setFloat("distance", _this.depthOfFieldDistance);
            };
            // Add to pipeline
            this.addEffect(new BABYLON.PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfField", function () { return _this.depthOfFieldPostProcess; }, true));
        };
+        return StandardRenderingPipeline;
+    }(BABYLON.PostProcessRenderPipeline));
+    BABYLON.StandardRenderingPipeline = StandardRenderingPipeline;
+})(BABYLON || (BABYLON = {}));

+ 267 - 0
src/PostProcess/babylon.standardRenderingPipeline.ts

@@ -0,0 +1,267 @@
module BABYLON {
    /**
     * Ready-made HDR rendering pipeline chaining:
     * pass-through -> 4x down-sample -> bright pass -> cascaded gaussian blurs ->
     * texture adder (exposure + lens dirt + tone map) -> optional depth-of-field.
     *
     * NOTE(review): the class declares `implements IDisposable` but no dispose()
     * method is visible in this chunk — confirm one exists or add it.
     */
    export class StandardRenderingPipeline extends PostProcessRenderPipeline implements IDisposable {
        // Public members
        // First pass holding the scene color (custom, or auto-created float-texture pass).
        public originalPostProcess: PostProcess;
        public downSampleX4PostProcess: PostProcess = null;
        public brightPassPostProcess: PostProcess = null;
        // Indices 0-3 are the bloom blur cascade; index 4 is the depth-of-field blur.
        public gaussianBlurHPostProcesses: PostProcess[] = [];
        public gaussianBlurVPostProcesses: PostProcess[] = [];
        public textureAdderPostProcess: PostProcess = null;
        // Snapshot of the sharp image used as the "otherSampler" input of the DOF mix.
        public depthOfFieldSourcePostProcess: PostProcess = null;
        public depthOfFieldPostProcess: PostProcess = null;

        // Luminance threshold used by the bright pass shader.
        public brightThreshold: number = 1.0;
        
        // Gaussian kernel parameters; weights are recomputed every frame from these.
        public gaussianCoefficient: number = 0.25;
        public gaussianMean: number = 1.0;
        public gaussianStandardDeviation: number = 1.0;

        // Exposure multiplier and optional lens-dirt texture used by the texture adder.
        public exposure: number = 1.0;
        public lensTexture: Texture = null;

        // Multiplier applied to the sampled depth in the depth-of-field blend.
        public depthOfFieldDistance: number = 10.0;

        // Private members
        private _scene: Scene;
        
        // Created lazily when DepthOfFieldEnabled is switched on.
        private _depthRenderer: DepthRenderer = null;

        // Getters and setters
        private _blurEnabled: boolean = true;
        private _depthOfFieldEnabled: boolean = false;

        /**
         * Enables/disables the bloom blur cascade. The loop stops at length - 1,
         * presumably to leave the depth-of-field blur (index 4) untouched — confirm.
         */
        public set BlurEnabled(enabled: boolean) {
            if (enabled && !this._blurEnabled || !enabled && this._blurEnabled) {
                for (var i = 0; i < this.gaussianBlurHPostProcesses.length - 1; i++) {
                    if (enabled) {
                        this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurH" + i, this._scene.cameras);
                        this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurV" + i, this._scene.cameras);
                    }
                    else {
                        this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurH" + i, this._scene.cameras);
                        this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurV" + i, this._scene.cameras);
                    }
                }
            }

            this._blurEnabled = enabled;
        }

        public get BlurEnabled(): boolean {
            return this._blurEnabled;
        }

        /**
         * Enables/disables the depth-of-field effects (source pass, blur index 4,
         * final mix). Enabling also turns on the scene depth renderer, which the
         * DOF shader samples for its blend factor.
         */
        public set DepthOfFieldEnabled(enabled: boolean) {
            if (enabled && !this._depthOfFieldEnabled) {
                this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRDepthOfFieldSource", this._scene.cameras);
                this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurH4", this._scene.cameras);
                this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRGaussianBlurV4", this._scene.cameras);
                this._scene.postProcessRenderPipelineManager.enableEffectInPipeline(this._name, "HDRDepthOfField", this._scene.cameras);
                this._depthRenderer = this._scene.enableDepthRenderer();
            }
            else if (!enabled && this._depthOfFieldEnabled) {
                this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfFieldSource", this._scene.cameras);
                this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurH4", this._scene.cameras);
                this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurV4", this._scene.cameras);
                this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfField", this._scene.cameras);
            }

            this._depthOfFieldEnabled = enabled;
        }

        public get DepthOfFieldEnabled(): boolean {
            return this._depthOfFieldEnabled;
        }

        /**
         * @constructor
         * @param {string} name - The rendering pipeline name
         * @param {BABYLON.Scene} scene - The scene linked to this pipeline
         * @param {any} ratio - The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width * 0.5 and a height = canvas.height * 0.5)
         * @param {BABYLON.PostProcess} originalPostProcess - the custom original color post-process. Must be "reusable". Can be null.
         * @param {BABYLON.Camera[]} cameras - The array of cameras that the rendering pipeline will be attached to
         */
        constructor(name: string, scene: Scene, ratio: number, originalPostProcess: PostProcess = null, cameras?: Camera[]) {
            super(scene.getEngine(), name);

            // Initialize
            this._scene = scene;

            // Create pass post-process (float texture so HDR values survive the first copy)
            if (!originalPostProcess) {
                this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), true, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_FLOAT);
            }
            else {
                this.originalPostProcess = originalPostProcess;
            }

            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => { return this.originalPostProcess; }, true));

            // Create down sample X4 post-process
            this._createDownSampleX4PostProcess(scene, ratio / 2);

            // Create bright pass post-process
            this._createBrightPassPostProcess(scene, ratio / 2);

            // Create gaussian blur post-processes (down sampling blurs)
            // NOTE(review): indices 0 and 4 are both created with ratio / 2, so their
            // PostProcess names ("HDRGaussianBlurH" + ratio) collide — confirm harmless.
            this._createGaussianBlurPostProcesses(scene, ratio / 2, 0);
            this._createGaussianBlurPostProcesses(scene, ratio / 4, 1);
            this._createGaussianBlurPostProcesses(scene, ratio / 8, 2);
            this._createGaussianBlurPostProcesses(scene, ratio / 16, 3);

            // Create texture adder post-process
            this._createTextureAdderPostProcess(scene, ratio);

            // Create depth-of-field source post-process (plain copy of the sharp image)
            this.depthOfFieldSourcePostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), true, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfFieldSource", () => { return this.depthOfFieldSourcePostProcess; }, true));

            // Create gaussian blur used by depth-of-field (index 4)
            this._createGaussianBlurPostProcesses(scene, ratio / 2, 4);

            // Create depth-of-field post-process
            this._createDepthOfFieldPostProcess(scene, ratio);

            // Finish
            scene.postProcessRenderPipelineManager.addPipeline(this);

            // NOTE(review): this only guards against an explicit null; an omitted
            // (undefined) `cameras` argument still reaches the attach call — confirm intended.
            if (cameras !== null) {
                scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(name, cameras);
            }

            // Depth-of-field is off by default; use the DepthOfFieldEnabled setter to turn it on.
            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfFieldSource", cameras);
            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurH4", cameras);
            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRGaussianBlurV4", cameras);
            this._scene.postProcessRenderPipelineManager.disableEffectInPipeline(this._name, "HDRDepthOfField", cameras);
        }

        // Down Sample X4 Post-Process: averages a 4x4 neighborhood into one texel.
        private _createDownSampleX4PostProcess(scene: Scene, ratio: number): void {
            // 16 vec2 offsets packed as 32 floats.
            var downSampleX4Offsets = new Array<number>(32);
            this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", Engine.TEXTURETYPE_UNSIGNED_INT);

            this.downSampleX4PostProcess.onApply = (effect: Effect) => {
                var id = 0;
                // Half-texel-centered 4x4 grid around the current UV.
                for (var i = -2; i < 2; i++) {
                    for (var j = -2; j < 2; j++) {
                        downSampleX4Offsets[id] = (i + 0.5) * (1.0 / this.downSampleX4PostProcess.width);
                        downSampleX4Offsets[id + 1] = (j + 0.5) * (1.0 / this.downSampleX4PostProcess.height);
                        id += 2;
                    }
                }

                effect.setArray2("dsOffsets", downSampleX4Offsets);
            };

            // Add to pipeline
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDownSampleX4", () => { return this.downSampleX4PostProcess; }, true));
        }

        // Brightpass Post-Process: keeps only texels brighter than brightThreshold.
        private _createBrightPassPostProcess(scene: Scene, ratio: number): void {
            // 4 vec2 half-texel corner offsets packed as 8 floats.
            var brightOffsets = new Array<number>(8);
            this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", Engine.TEXTURETYPE_UNSIGNED_INT);

            this.brightPassPostProcess.onApply = (effect: Effect) => {
                var sU = (1.0 / this.brightPassPostProcess.width);
                var sV = (1.0 / this.brightPassPostProcess.height);

                brightOffsets[0] = -0.5 * sU;
                brightOffsets[1] = 0.5 * sV;
                brightOffsets[2] = 0.5 * sU;
                brightOffsets[3] = 0.5 * sV;
                brightOffsets[4] = -0.5 * sU;
                brightOffsets[5] = -0.5 * sV;
                brightOffsets[6] = 0.5 * sU;
                brightOffsets[7] = -0.5 * sV;
                
                effect.setArray2("dsOffsets", brightOffsets);
                effect.setFloat("brightThreshold", this.brightThreshold);
            }

            // Add to pipeline
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBrightPass", () => { return this.brightPassPostProcess; }, true));
        }

        // Create gaussian blur H&V post-processes (one separable 9-tap blur pair).
        private _createGaussianBlurPostProcesses(scene: Scene, ratio: number, indice: number): void {
            var blurOffsets = new Array<number>(9);
            var blurWeights = new Array<number>(9);
            var uniforms: string[] = ["blurOffsets", "blurWeights"];

            // Factory returning an onApply handler; `height` picks the axis.
            var callback = (height: boolean) => {
                return (effect: Effect) => {
                    // Weights: gaussian evaluated at x in [-1, 1] in steps of 1/4.
                    var x: number = 0.0;

                    for (var i = 0; i < 9; i++) {
                        x = (i - 4.0) / 4.0;
                        blurWeights[i] =
                            this.gaussianCoefficient
                            * (1.0 / Math.sqrt(2.0 * Math.PI * this.gaussianStandardDeviation))
                            * Math.exp((-((x - this.gaussianMean) * (x - this.gaussianMean))) / (2.0 * this.gaussianStandardDeviation * this.gaussianStandardDeviation));
                    }
                    
                    // NOTE(review): offsets use the engine's current render size rather
                    // than this post-process's own dimensions — confirm intended.
                    var lastOutputDimensions: any = {
                        width: scene.getEngine().getRenderWidth(),
                        height: scene.getEngine().getRenderHeight()
                    };

                    for (var i = 0; i < 9; i++) {
                        var value = (i - 4.0) * (1.0 / (height === true ? lastOutputDimensions.height : lastOutputDimensions.width));
                        blurOffsets[i] = value;
                    }

                    effect.setArray("blurOffsets", blurOffsets);
                    effect.setArray("blurWeights", blurWeights);
                };
            };

            // Create horizontal gaussian blur post-processes
            var gaussianBlurHPostProcess = new PostProcess("HDRGaussianBlurH" + ratio, "standard", uniforms, [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define GAUSSIAN_BLUR_H", Engine.TEXTURETYPE_UNSIGNED_INT);
            gaussianBlurHPostProcess.onApply = callback(false);

            // Create vertical gaussian blur post-process
            var gaussianBlurVPostProcess = new PostProcess("HDRGaussianBlurV" + ratio, "standard", uniforms, [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define GAUSSIAN_BLUR_V", Engine.TEXTURETYPE_UNSIGNED_INT);
            gaussianBlurVPostProcess.onApply = callback(true);

            // Add to pipeline (effect names are keyed by `indice`, unlike the names above)
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRGaussianBlurH" + indice, () => { return gaussianBlurHPostProcess; }, true));
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRGaussianBlurV" + indice, () => { return gaussianBlurVPostProcess; }, true));

            // Keep references so BlurEnabled/DepthOfFieldEnabled can toggle them.
            this.gaussianBlurHPostProcesses.push(gaussianBlurHPostProcess);
            this.gaussianBlurVPostProcesses.push(gaussianBlurVPostProcess);
        }

        // Create texture adder post-process: original color + bloom + lens dirt, with exposure.
        private _createTextureAdderPostProcess(scene: Scene, ratio: number): void {
            // NOTE(review): unused local — consider removing.
            var lastGaussianBlurPostProcess = this.gaussianBlurVPostProcesses[3];

            this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), true, "#define TEXTURE_ADDER", Engine.TEXTURETYPE_UNSIGNED_INT);
            this.textureAdderPostProcess.onApply = (effect: Effect) => {
                effect.setTextureFromPostProcess("otherSampler", this.originalPostProcess);
                effect.setTexture("lensSampler", this.lensTexture);

                effect.setFloat("exposure", this.exposure);
            };

            // Add to pipeline
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true));
        }

        // Create depth-of-field post-process: mixes sharp and blurred images by depth.
        private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void {
            this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Engine.TEXTURETYPE_UNSIGNED_INT);
            this.depthOfFieldPostProcess.onApply = (effect: Effect) => {
                effect.setTextureFromPostProcess("otherSampler", this.depthOfFieldSourcePostProcess);
                // NOTE(review): _depthRenderer is assigned only by the DepthOfFieldEnabled
                // setter; this effect starts disabled, so the handler should not run
                // before that — confirm.
                effect.setTexture("depthSampler", this._depthRenderer.getDepthMap());

                effect.setFloat("distance", this.depthOfFieldDistance);
            };

            // Add to pipeline
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfField", () => { return this.depthOfFieldPostProcess; }, true));
        }
    }
}

+ 138 - 0
src/Shaders/standard.fragment.fx

@@ -0,0 +1,138 @@
+uniform sampler2D textureSampler;
+varying vec2 vUV;
+
#if defined(PASS_POST_PROCESS)
// Pass-through: copy the input texture to the output unchanged.
void main(void)
{
	gl_FragColor = texture2D(textureSampler, vUV);
}
#endif
+
#if defined(DOWN_SAMPLE_X4)
uniform vec2 dsOffsets[16];

// 4x4 box down-sample: averages 16 taps placed around the current texel
// (offsets are computed on the CPU from the target size).
void main(void)
{
	vec4 average = vec4(0.0, 0.0, 0.0, 0.0);

	// Constant-bounds loop (unrollable), so the uniform-array indexing stays a
	// constant-index-expression as required for GLSL ES 1.00 fragment shaders.
	// Replaces 16 copy-pasted taps (the first of which redundantly used `=`
	// right after the initialization above).
	for (int i = 0; i < 16; i++) {
		average += texture2D(textureSampler, vUV + dsOffsets[i]);
	}

	gl_FragColor = average / 16.0;
}
#endif
+
#if defined(BRIGHT_PASS)
uniform vec2 dsOffsets[4];
uniform float brightThreshold;

// Bright pass: averages four corner taps, then keeps the color only when its
// luminance reaches brightThreshold; otherwise outputs opaque black.
void main(void)
{
	vec4 average =
		  texture2D(textureSampler, vUV + dsOffsets[0])
		+ texture2D(textureSampler, vUV + dsOffsets[1])
		+ texture2D(textureSampler, vUV + dsOffsets[2])
		+ texture2D(textureSampler, vUV + dsOffsets[3]);

	average *= 0.25;

	// Luminance approximated by the magnitude of the RGB vector.
	if (length(average.rgb) < brightThreshold) {
		average = vec4(0.0, 0.0, 0.0, 1.0);
	}

	gl_FragColor = average;
}
#endif
+
#if defined(GAUSSIAN_BLUR_H) || defined(GAUSSIAN_BLUR_V)
// 9 offsets spanning roughly -4..+4 texels (CPU-computed) and their gaussian weights.
uniform float blurOffsets[9];
uniform float blurWeights[9];

// Separable gaussian blur along one axis (H or V selected at compile time).
// NOTE(review): the offset array is already symmetric around 0, and each entry
// is sampled at both +offset and -offset, so every tap is effectively taken
// twice (doubling the kernel's total weight) — confirm this is the intended look.
void main(void)
{
	vec4 color = vec4(0.0, 0.0, 0.0, 0.0);

	for (int i = 0; i < 9; i++) {
		#ifdef GAUSSIAN_BLUR_H
		color += (texture2D(textureSampler, vUV + vec2(blurOffsets[i] * 2.0, 0.0)) * blurWeights[i]);
		color += (texture2D(textureSampler, vUV - vec2(blurOffsets[i] * 2.0, 0.0)) * blurWeights[i]);
		#else
		color += (texture2D(textureSampler, vUV + vec2(0.0, blurOffsets[i] * 2.0)) * blurWeights[i]);
		color += (texture2D(textureSampler, vUV - vec2(0.0, blurOffsets[i] * 2.0)) * blurWeights[i]);
		#endif
	}

	// Force full opacity regardless of accumulated alpha.
	color.a = 1.0;
	gl_FragColor = color;
}
#endif
+
#if defined(TEXTURE_ADDER)
uniform sampler2D otherSampler;   // original (sharp) scene color
uniform sampler2D lensSampler;    // lens-dirt / flare texture modulating the bloom

uniform float exposure;

// Combines the blurred bloom input (textureSampler) with the original image:
// exposure scale, a rational tone-mapping curve, lens-dirt modulation, then add.
void main(void)
{
	vec3 colour = texture2D(textureSampler, vUV).rgb;
	
	colour *= exposure;

	// Rational curve — appears to be the Hejl/Burgess-Dawson filmic fit, whose
	// output is gamma-encoded; the squaring below presumably converts it back
	// toward linear — TODO confirm against the intended tone-mapping reference.
	vec3 X = max(vec3(0.0, 0.0, 0.0), colour - 0.004);
	vec3 retColor = (X * (6.2 * X + 0.5)) / (X * (6.2 * X + 1.7) + 0.06);

	colour = retColor * retColor;
	colour += colour * texture2D(lensSampler, vUV).rgb;

	gl_FragColor = vec4(colour.rgb, 1.0) + texture2D(otherSampler, vUV);
}
#endif
+
#if defined(DEPTH_OF_FIELD)
uniform sampler2D otherSampler;   // sharp image (depth-of-field source pass)
uniform sampler2D depthSampler;   // scene depth map

uniform float distance;           // CPU-side multiplier applied to the raw depth

// Depth-of-field mix: blends the sharp and blurred images using a piecewise
// factor of the scaled depth. Near range (< 0.05) is fully blurred, a short
// transition follows, mid range (0.1..0.5) stays sharp, and blur ramps back in
// with distance. NOTE(review): factor = 1.0 at the *nearest* depths — confirm
// this near-blur is intended rather than inverted.
void main(void)
{
	vec4 sharp = texture2D(otherSampler, vUV);
	vec4 blur = texture2D(textureSampler, vUV);
	float dist = clamp(texture2D(depthSampler, vUV).r * distance, 0.0, 1.0);
	float factor = 0.0;

	if (dist < 0.05)
        factor = 1.0;
    else if (dist < 0.1)
        factor = 20.0 * (0.1 - dist);
    else if (dist < 0.5)
        factor=0.0;
    else
        factor = 2.0 * (dist - 0.5);

    // Cap the blur contribution so the sharp image always bleeds through a little.
    factor = clamp(factor, 0.0, 0.90);
    gl_FragColor = mix(sharp, blur, factor);
}

#endif

+ 4 - 1
src/Tools/babylon.tools.js

@@ -206,7 +206,10 @@ var BABYLON;
          */
         Tools.QueueNewFrame = function (func, requester) {
             if (requester === void 0) { requester = window; }
-            if (requester.requestAnimationFrame)
+            //if WebVR is enabled AND presenting, requestAnimationFrame is triggered when enabled.
+            /*if(requester.isPresenting) {
+                return;
+            } else*/ if (requester.requestAnimationFrame)
                 requester.requestAnimationFrame(func);
             else if (requester.msRequestAnimationFrame)
                 requester.msRequestAnimationFrame(func);

+ 15 - 4
src/babylon.engine.js

@@ -168,6 +168,9 @@ var BABYLON;
                     //get the old size before we change
                     _this._oldSize = new BABYLON.Size(_this.getRenderWidth(), _this.getRenderHeight());
                     _this._oldHardwareScaleFactor = _this.getHardwareScalingLevel();
+                    //according to the WebVR specs, requestAnimationFrame should be triggered only once.
+                    //But actually, no browser follow the specs...
+                    //this._vrAnimationFrameHandler = this._vrDisplayEnabled.requestAnimationFrame(this._bindedRenderFunction);
                     //get the width and height, change the render size
                     var leftEye = _this._vrDisplayEnabled.getEyeParameters('left');
                     var width, height;
@@ -175,8 +178,11 @@ var BABYLON;
                     _this.setSize(leftEye.renderWidth * 2, leftEye.renderHeight);
                 }
                 else {
+                    //When the specs are implemented, need to uncomment this.
+                    //this._vrDisplayEnabled.cancelAnimationFrame(this._vrAnimationFrameHandler);
                     _this.setHardwareScalingLevel(_this._oldHardwareScaleFactor);
                     _this.setSize(_this._oldSize.width, _this._oldSize.height);
+                    _this._vrDisplayEnabled = undefined;
                 }
             };
             this._renderingCanvas = canvas;
@@ -315,7 +321,9 @@ var BABYLON;
             //default loading screen
             this._loadingScreen = new BABYLON.DefaultLoadingScreen(this._renderingCanvas);
             //Load WebVR Devices
-            this._getVRDisplays();
+            if (options.autoEnableWebVR) {
+                this.initWebVR();
+            }
             BABYLON.Tools.Log("Babylon.js engine (v" + Engine.Version + ") launched");
         }
         Object.defineProperty(Engine, "ALPHA_DISABLE", {
@@ -789,15 +797,18 @@ var BABYLON;
             }
         };
         //WebVR functions
+        Engine.prototype.initWebVR = function () {
+            if (!this.vrDisplaysPromise) {
+                this._getVRDisplays();
+            }
+        };
         Engine.prototype.enableVR = function (vrDevice) {
             this._vrDisplayEnabled = vrDevice;
             this._vrDisplayEnabled.requestPresent([{ source: this.getRenderingCanvas() }]).then(this._onVRFullScreenTriggered);
         };
         Engine.prototype.disableVR = function () {
             if (this._vrDisplayEnabled) {
-                this._vrDisplayEnabled.exitPresent();
-                this._vrDisplayEnabled = null;
-                this._onVRFullScreenTriggered();
+                this._vrDisplayEnabled.exitPresent().then(this._onVRFullScreenTriggered);
             }
         };
         Engine.prototype._getVRDisplays = function () {