
Merge pull request #5051 from julien-moreau/master

Added FXAA and MSAA support to StandardRenderingPipeline
sebavan 7 years ago
parent
commit
9c7199a5d5
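
For reference, a minimal usage sketch of the two options this PR adds (not part of the diff itself; it assumes an existing Babylon.js `scene` and `camera`, and the variable names are illustrative):

```typescript
// Minimal sketch: enable the new FXAA and MSAA options on a StandardRenderingPipeline.
// Assumes a running Babylon.js scene and an active camera.
const pipeline = new BABYLON.StandardRenderingPipeline(
    "standard", // pipeline name
    scene,      // scene the pipeline is attached to
    1.0,        // render ratio
    null,       // no custom original post-process
    [camera]    // cameras the pipeline applies to
);

pipeline.fxaaEnabled = true; // appends the FxaaPostProcess when the pipeline is (re)built
pipeline.samples = 4;        // requests 4x MSAA on the first post-process (WebGL 2 only)
```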

+ 1 - 0
dist/preview release/what's new.md

@@ -117,6 +117,7 @@
 - Improved _isSyncronized performance and reduced GC in TransformNode.computeWorldMatrix by directly reading property. ([Bolloxim](https://github.com/Bolloxim))
 - Added supports for reflectionMatrix in Skybox Mode Cube Texture allowing offsetting the world center or rotating the matrix ([sebavan](http://www.github.com/sebavan))
 - Improved performance of cached nodes but ensuring parent always updates cache. This removes failed isSynchronized test that meant computeWorldMatrix would always have to rebuild. On large scenes this could double framerate. ([Bolloxim](https://github.com/Bolloxim))
+- Added FXAA and MSAA support to the StandardRenderingPipeline ([julien-moreau](https://github.com/julien-moreau))
 
 ### glTF Loader
 

+ 221 - 8
src/PostProcess/RenderPipeline/Pipelines/babylon.standardRenderingPipeline.ts

@@ -4,89 +4,227 @@
          * Public members
          */
         // Post-processes
+        /**
+         * Post-process which contains the original scene color before the pipeline applies all the effects
+         */
         public originalPostProcess: Nullable<PostProcess>;
+        /**
+         * Post-process used to down scale an image x4
+         */
         public downSampleX4PostProcess: Nullable<PostProcess> = null;
+        /**
+         * Post-process used to calculate the illuminated surfaces controlled by a threshold
+         */
         public brightPassPostProcess: Nullable<PostProcess> = null;
+        /**
+         * Post-process array storing all the horizontal blur post-processes used by the pipeline
+         */
         public blurHPostProcesses: PostProcess[] = [];
+        /**
+         * Post-process array storing all the vertical blur post-processes used by the pipeline
+         */
         public blurVPostProcesses: PostProcess[] = [];
+        /**
+         * Post-process used to add colors of 2 textures (typically brightness + real scene color)
+         */
         public textureAdderPostProcess: Nullable<PostProcess> = null;
 
+        /**
+         * Post-process used to create volumetric lighting effect
+         */
         public volumetricLightPostProcess: Nullable<PostProcess> = null;
+        /**
+         * Post-process used to smooth the previous volumetric light post-process on the X axis
+         */
         public volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess> = null;
+        /**
+         * Post-process used to smooth the previous volumetric light post-process on the Y axis
+         */
         public volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess> = null;
+        /**
+         * Post-process used to merge the volumetric light effect and the real scene color
+         */
         public volumetricLightMergePostProces: Nullable<PostProcess> = null;
+        /**
+         * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose)
+         */
         public volumetricLightFinalPostProcess: Nullable<PostProcess> = null;
 
+        /**
+         * Base post-process used to calculate the average luminance of the final image for HDR
+         */
         public luminancePostProcess: Nullable<PostProcess> = null;
+        /**
+         * Post-processes used to create down sample post-processes in order to get
+         * the average luminance of the final image for HDR
+         * Array of length "StandardRenderingPipeline.LuminanceSteps"
+         */
         public luminanceDownSamplePostProcesses: PostProcess[] = [];
+        /**
+         * Post-process used to create a HDR effect (light adaptation)
+         */
         public hdrPostProcess: Nullable<PostProcess> = null;
-
+        /**
+         * Post-process used to store the final texture adder post-process (attach/detach for debug purpose)
+         */
         public textureAdderFinalPostProcess: Nullable<PostProcess> = null;
+        /**
+         * Post-process used to store the final lens flare post-process (attach/detach for debug purpose)
+         */
         public lensFlareFinalPostProcess: Nullable<PostProcess> = null;
+        /**
+         * Post-process used to merge the final HDR post-process and the real scene color
+         */
         public hdrFinalPostProcess: Nullable<PostProcess> = null;
-
+        /**
+         * Post-process used to create a lens flare effect
+         */
         public lensFlarePostProcess: Nullable<PostProcess> = null;
+        /**
+         * Post-process that merges the result of the lens flare post-process and the real scene color
+         */
         public lensFlareComposePostProcess: Nullable<PostProcess> = null;
-
+        /**
+         * Post-process used to create a motion blur effect
+         */
         public motionBlurPostProcess: Nullable<PostProcess> = null;
-
+        /**
+         * Post-process used to create a depth of field effect
+         */
         public depthOfFieldPostProcess: Nullable<PostProcess> = null;
+        /**
+         * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
+         */
+        public fxaaPostProcess: Nullable<FxaaPostProcess> = null;
 
         // Values
+
+        /**
+         * Represents the brightness threshold in order to configure the illuminated surfaces
+         */
         @serialize()
         public brightThreshold: number = 1.0;
 
+        /**
+         * Configures the blur intensity used for overexposed or highlighted surfaces (light halo)
+         */
         @serialize()
         public blurWidth: number = 512.0;
+        /**
+         * Sets if the blur for highlighted surfaces must be only horizontal
+         */
         @serialize()
         public horizontalBlur: boolean = false;
 
+        /**
+         * Sets the overall exposure used by the pipeline
+         */
         @serialize()
         public exposure: number = 1.0;
+
+        /**
+         * Texture used typically to simulate "dirty" on camera lens
+         */
         @serializeAsTexture("lensTexture")
         public lensTexture: Nullable<Texture> = null;
 
+        /**
+         * Represents the offset coefficient based on Rayleigh principle. Typically in interval [-0.2, 0.2]
+         */
         @serialize()
         public volumetricLightCoefficient: number = 0.2;
+        /**
+         * The overall power of volumetric lights, typically in interval [0, 10] maximum
+         */
         @serialize()
         public volumetricLightPower: number = 4.0;
+        /**
+         * Used to set the blur intensity to smooth the volumetric lights
+         */
         @serialize()
         public volumetricLightBlurScale: number = 64.0;
-
+        /**
+         * Light (spot or directional) used to generate the volumetric lights rays
+         * The source light must have a shadow generator so the pipeline can get its
+         * depth map
+         */
         public sourceLight: Nullable<SpotLight |  DirectionalLight> = null;
 
+        /**
+         * For eye adaptation, represents the minimum luminance the eye can see
+         */
         @serialize()
         public hdrMinimumLuminance: number = 1.0;
+        /**
+         * For eye adaptation, represents the decrease luminance speed
+         */
         @serialize()
         public hdrDecreaseRate: number = 0.5;
+        /**
+         * For eye adaptation, represents the increase luminance speed
+         */
         @serialize()
         public hdrIncreaseRate: number = 0.5;
 
+        /**
+         * Lens color texture used by the lens flare effect. Mandatory if the lens flare effect is enabled
+         */
         @serializeAsTexture("lensColorTexture")
         public lensColorTexture: Nullable<Texture> = null;
+        /**
+         * The overall strength for the lens flare effect
+         */
         @serialize()
         public lensFlareStrength: number = 20.0;
+        /**
+         * Dispersion coefficient for lens flare ghosts
+         */
         @serialize()
         public lensFlareGhostDispersal: number = 1.4;
+        /**
+         * Main lens flare halo width
+         */
         @serialize()
         public lensFlareHaloWidth: number = 0.7;
+        /**
+         * Based on the lens distortion effect, defines how much the lens flare result
+         * is distorted
+         */
         @serialize()
         public lensFlareDistortionStrength: number = 16.0;
+        /**
+         * Lens star texture used to simulate rays on the flares; an example texture is available
+         * in the documentation
+         */
         @serializeAsTexture("lensStarTexture")
         public lensStarTexture: Nullable<Texture> = null;
+        /**
+         * Like the "lensTexture" (it can be the same texture or a different one), this texture
+         * is used to apply the lens flare effect by taking the dirt texture into account
+         */
         @serializeAsTexture("lensFlareDirtTexture")
         public lensFlareDirtTexture: Nullable<Texture> = null;
 
+        /**
+         * Represents the focal length for the depth of field effect
+         */
         @serialize()
         public depthOfFieldDistance: number = 10.0;
-
+        /**
+         * Represents the blur intensity for the blurred part of the depth of field effect
+         */
         @serialize()
         public depthOfFieldBlurWidth: number = 64.0;
 
+        /**
+         * For motion blur, defines how much the image is blurred by the movement
+         */
         @serialize()
         public motionStrength: number = 1.0;
 
-        // IAnimatable
+        /**
+         * List of animations for the pipeline (IAnimatable implementation)
+         */
         public animations: Animation[] = [];
 
         /**
@@ -104,16 +242,21 @@
         private _ratio: number;
 
         // Getters and setters
-        private _bloomEnabled: boolean = true;
+        private _bloomEnabled: boolean = false;
         private _depthOfFieldEnabled: boolean = false;
         private _vlsEnabled: boolean = false;
         private _lensFlareEnabled: boolean = false;
         private _hdrEnabled: boolean = false;
         private _motionBlurEnabled: boolean = false;
+        private _fxaaEnabled: boolean = false;
 
         private _motionBlurSamples: number = 64.0;
         private _volumetricLightStepsCount: number = 50.0;
+        private _samples: number = 1;
 
+        /**
+         * Specifies if the bloom pipeline is enabled
+         */
         @serialize()
         public get BloomEnabled(): boolean {
             return this._bloomEnabled;
@@ -128,6 +271,9 @@
             this._buildPipeline();
         }
 
+        /**
+         * Specifies if the depth of field pipeline is enabled
+         */
         @serialize()
         public get DepthOfFieldEnabled(): boolean {
             return this._depthOfFieldEnabled;
@@ -142,6 +288,9 @@
             this._buildPipeline();
         }
 
+        /**
+         * Specifies if the lens flare pipeline is enabled
+         */
         @serialize()
         public get LensFlareEnabled(): boolean {
             return this._lensFlareEnabled;
@@ -156,6 +305,9 @@
             this._buildPipeline();
         }
 
+        /**
+         * Specifies if the HDR pipeline is enabled
+         */
         @serialize()
         public get HDREnabled(): boolean {
             return this._hdrEnabled;
@@ -170,6 +322,9 @@
             this._buildPipeline();
         }
 
+        /**
+         * Specifies if the volumetric lights scattering effect is enabled
+         */
         @serialize()
         public get VLSEnabled(): boolean {
             return this._vlsEnabled;
@@ -192,6 +347,9 @@
             this._buildPipeline();
         }
 
+        /**
+         * Specifies if the motion blur effect is enabled
+         */
         @serialize()
         public get MotionBlurEnabled(): boolean {
             return this._motionBlurEnabled;
@@ -206,6 +364,27 @@
             this._buildPipeline();
         }
 
+        /**
+         * Specifies if anti-aliasing is enabled
+         */
+        @serialize()
+        public get fxaaEnabled(): boolean {
+            return this._fxaaEnabled;
+        }
+
+        public set fxaaEnabled(enabled: boolean) {
+            if (this._fxaaEnabled === enabled) {
+                return;
+            }
+
+            this._fxaaEnabled = enabled;
+            this._buildPipeline();
+        }
+
+        /**
+         * Specifies the number of steps used to calculate the volumetric lights
+         * Typically in interval [50, 200]
+         */
         @serialize()
         public get volumetricLightStepsCount(): number {
             return this._volumetricLightStepsCount;
@@ -219,6 +398,10 @@
             this._volumetricLightStepsCount = count;
         }
 
+        /**
+         * Specifies the number of samples used for the motion blur effect
+         * Typically in interval [16, 64]
+         */
         @serialize()
         public get motionBlurSamples(): number {
             return this._motionBlurSamples;
@@ -233,6 +416,23 @@
         }
 
         /**
+         * Specifies MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
+         */
+        @serialize()
+        public get samples(): number {
+            return this._samples;
+        }
+
+        public set samples(sampleCount: number) {
+            if (this._samples === sampleCount) {
+                return;
+            }
+
+            this._samples = sampleCount;
+            this._buildPipeline();
+        }
+
+        /**
          * @constructor
          * @param {string} name - The rendering pipeline name
          * @param {BABYLON.Scene} scene - The scene linked to this pipeline
@@ -340,9 +540,19 @@
                 this._createMotionBlurPostProcess(scene, ratio);
             }
 
+            if (this._fxaaEnabled) {
+                // Create fxaa post-process
+                this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
+                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true));
+            }
+
             if (this._cameras !== null) {
                 this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
             }
+
+            if (!this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1){
+                BABYLON.Tools.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
+            }
         }
 
         // Down Sample X4 Post-Processs
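
Side note on the MSAA check just above: `_enableMSAAOnFirstPostProcess` can only take effect on a WebGL 2 context, hence the warning. A hedged user-side sketch of guarding the request (assuming the engine exposes its WebGL version via `engine.webGLVersion`, and falling back to the new FXAA flag otherwise):

```typescript
// Sketch: prefer hardware MSAA when WebGL 2 is available, otherwise fall back to FXAA.
// `engine` and `pipeline` are assumed to exist already; names are illustrative.
if (engine.webGLVersion >= 2) {
    pipeline.samples = 4;        // MSAA on the first post-process of the pipeline
} else {
    pipeline.fxaaEnabled = true; // shader-based anti-aliasing, also works on WebGL 1
}
```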
@@ -780,6 +990,8 @@
 
                 if (this.motionBlurPostProcess) { this.motionBlurPostProcess.dispose(camera); }
 
+                if (this.fxaaPostProcess) { this.fxaaPostProcess.dispose(camera); }
+
                 for (var j = 0; j < this.blurHPostProcesses.length; j++) {
                     this.blurHPostProcesses[j].dispose(camera);
                 }
@@ -806,6 +1018,7 @@
             this.hdrFinalPostProcess = null;
             this.depthOfFieldPostProcess = null;
             this.motionBlurPostProcess = null;
+            this.fxaaPostProcess = null;
 
             this.luminanceDownSamplePostProcesses = [];
             this.blurHPostProcesses = [];
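
Since both new setters go through `_buildPipeline()`, which detaches the cameras, disposes the existing post-processes (including `fxaaPostProcess`, as shown in the last two hunks) and rebuilds the chain, the options can also be toggled at runtime. A small illustrative sketch:

```typescript
// Sketch: toggle FXAA at runtime (e.g. from a debug UI).
// Each change rebuilds the pipeline; the setter is a no-op when the value is unchanged.
function toggleFxaa(pipeline: BABYLON.StandardRenderingPipeline): void {
    pipeline.fxaaEnabled = !pipeline.fxaaEnabled;
}
```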