@@ -4,89 +4,227 @@
 * Public members
 */
// Post-processes
+ /**
+ * Post-process which contains the original scene color before the pipeline applies all the effects
+ */
public originalPostProcess: Nullable<PostProcess>;
+ /**
+ * Post-process used to down scale an image x4
+ */
public downSampleX4PostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-process used to calculate the illuminated surfaces controlled by a threshold
+ */
public brightPassPostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-process array storing all the horizontal blur post-processes used by the pipeline
+ */
public blurHPostProcesses: PostProcess[] = [];
+ /**
+ * Post-process array storing all the vertical blur post-processes used by the pipeline
+ */
public blurVPostProcesses: PostProcess[] = [];
+ /**
+ * Post-process used to add colors of 2 textures (typically brightness + real scene color)
+ */
public textureAdderPostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-process used to create volumetric lighting effect
+ */
public volumetricLightPostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-process used to smooth the previous volumetric light post-process on the X axis
+ */
public volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess> = null;
+ /**
+ * Post-process used to smooth the previous volumetric light post-process on the Y axis
+ */
public volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess> = null;
+ /**
+ * Post-process used to merge the volumetric light effect and the real scene color
+ */
public volumetricLightMergePostProces: Nullable<PostProcess> = null;
+ /**
+ * Post-process used to store the final volumetric light post-process (attach/detach for debug purposes)
+ */
public volumetricLightFinalPostProcess: Nullable<PostProcess> = null;
+ /**
+ * Base post-process used to calculate the average luminance of the final image for HDR
+ */
public luminancePostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-processes used to create down sample post-processes in order to get
+ * the average luminance of the final image for HDR
+ * Array of length "StandardRenderingPipeline.LuminanceSteps"
+ */
public luminanceDownSamplePostProcesses: PostProcess[] = [];
+ /**
+ * Post-process used to create an HDR effect (light adaptation)
+ */
public hdrPostProcess: Nullable<PostProcess> = null;
-
+ /**
+ * Post-process used to store the final texture adder post-process (attach/detach for debug purposes)
+ */
public textureAdderFinalPostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-process used to store the final lens flare post-process (attach/detach for debug purposes)
+ */
public lensFlareFinalPostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-process used to merge the final HDR post-process and the real scene color
+ */
public hdrFinalPostProcess: Nullable<PostProcess> = null;
-
+ /**
+ * Post-process used to create a lens flare effect
+ */
public lensFlarePostProcess: Nullable<PostProcess> = null;
+ /**
+ * Post-process that merges the result of the lens flare post-process and the real scene color
+ */
public lensFlareComposePostProcess: Nullable<PostProcess> = null;
-
+ /**
+ * Post-process used to create a motion blur effect
+ */
public motionBlurPostProcess: Nullable<PostProcess> = null;
-
+ /**
+ * Post-process used to create a depth of field effect
+ */
public depthOfFieldPostProcess: Nullable<PostProcess> = null;
+ /**
+ * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
+ */
+ public fxaaPostProcess: Nullable<FxaaPostProcess> = null;
// Values
+
+ /**
+ * Represents the brightness threshold used to configure the illuminated surfaces
+ */
@serialize()
public brightThreshold: number = 1.0;

+ /**
+ * Configures the blur intensity used for overexposed or highlighted surfaces (light halo)
+ */
@serialize()
public blurWidth: number = 512.0;
+ /**
+ * Sets if the blur for highlighted surfaces must be only horizontal
+ */
@serialize()
public horizontalBlur: boolean = false;

+ /**
+ * Sets the overall exposure used by the pipeline
+ */
@serialize()
public exposure: number = 1.0;
+
+ /**
+ * Texture used typically to simulate "dirt" on the camera lens
+ */
@serializeAsTexture("lensTexture")
public lensTexture: Nullable<Texture> = null;
+ /**
+ * Represents the offset coefficient based on the Rayleigh principle. Typically in the interval [-0.2, 0.2]
+ */
@serialize()
public volumetricLightCoefficient: number = 0.2;
+ /**
+ * The overall power of volumetric lights, typically in the interval [0, 10] maximum
+ */
@serialize()
public volumetricLightPower: number = 4.0;
+ /**
+ * Used to set the blur intensity to smooth the volumetric lights
+ */
@serialize()
public volumetricLightBlurScale: number = 64.0;
-
+ /**
+ * Light (spot or directional) used to generate the volumetric light rays
+ * The source light must have a shadow generator so the pipeline can get its
+ * depth map
+ */
public sourceLight: Nullable<SpotLight | DirectionalLight> = null;
+ /**
+ * For eye adaptation, represents the minimum luminance the eye can see
+ */
@serialize()
public hdrMinimumLuminance: number = 1.0;
+ /**
+ * For eye adaptation, represents the luminance decrease speed
+ */
@serialize()
public hdrDecreaseRate: number = 0.5;
+ /**
+ * For eye adaptation, represents the luminance increase speed
+ */
@serialize()
public hdrIncreaseRate: number = 0.5;

+ /**
+ * Lens color texture used by the lens flare effect. Mandatory if the lens flare effect is enabled
+ */
@serializeAsTexture("lensColorTexture")
public lensColorTexture: Nullable<Texture> = null;
+ /**
+ * The overall strength for the lens flare effect
+ */
@serialize()
public lensFlareStrength: number = 20.0;
+ /**
+ * Dispersion coefficient for lens flare ghosts
+ */
@serialize()
public lensFlareGhostDispersal: number = 1.4;
+ /**
+ * Main lens flare halo width
+ */
@serialize()
public lensFlareHaloWidth: number = 0.7;
+ /**
+ * Based on the lens distortion effect, defines how much the lens flare result
+ * is distorted
+ */
@serialize()
public lensFlareDistortionStrength: number = 16.0;
+ /**
+ * Lens star texture used to simulate rays on the flares; a sample texture is available
+ * in the documentation
+ */
@serializeAsTexture("lensStarTexture")
public lensStarTexture: Nullable<Texture> = null;
+ /**
+ * Like the "lensTexture" (it can be the same texture or a different one), this texture is used to apply the lens
+ * flare effect, taking the dirt texture into account
+ */
@serializeAsTexture("lensFlareDirtTexture")
public lensFlareDirtTexture: Nullable<Texture> = null;
+ /**
+ * Represents the focal length for the depth of field effect
+ */
@serialize()
public depthOfFieldDistance: number = 10.0;
-
+ /**
+ * Represents the blur intensity for the blurred part of the depth of field effect
+ */
@serialize()
public depthOfFieldBlurWidth: number = 64.0;

+ /**
+ * For motion blur, defines how much the image is blurred by the movement
+ */
@serialize()
public motionStrength: number = 1.0;

- // IAnimatable
+ /**
+ * List of animations for the pipeline (IAnimatable implementation)
+ */
public animations: Animation[] = [];
/**
@@ -104,16 +242,21 @@
private _ratio: number;

// Getters and setters
- private _bloomEnabled: boolean = true;
+ private _bloomEnabled: boolean = false;
private _depthOfFieldEnabled: boolean = false;
private _vlsEnabled: boolean = false;
private _lensFlareEnabled: boolean = false;
private _hdrEnabled: boolean = false;
private _motionBlurEnabled: boolean = false;
+ private _fxaaEnabled: boolean = false;

private _motionBlurSamples: number = 64.0;
private _volumetricLightStepsCount: number = 50.0;
+ private _samples: number = 1;

+ /**
+ * Specifies if the bloom pipeline is enabled
+ */
@serialize()
public get BloomEnabled(): boolean {
return this._bloomEnabled;
@@ -128,6 +271,9 @@
this._buildPipeline();
}
+ /**
+ * Specifies if the depth of field pipeline is enabled
+ */
@serialize()
public get DepthOfFieldEnabled(): boolean {
return this._depthOfFieldEnabled;
@@ -142,6 +288,9 @@
this._buildPipeline();
}

+ /**
+ * Specifies if the lens flare pipeline is enabled
+ */
@serialize()
public get LensFlareEnabled(): boolean {
return this._lensFlareEnabled;
@@ -156,6 +305,9 @@
this._buildPipeline();
}

+ /**
+ * Specifies if the HDR pipeline is enabled
+ */
@serialize()
public get HDREnabled(): boolean {
return this._hdrEnabled;
@@ -170,6 +322,9 @@
this._buildPipeline();
}

+ /**
+ * Specifies if the volumetric light scattering effect is enabled
+ */
@serialize()
public get VLSEnabled(): boolean {
return this._vlsEnabled;
@@ -192,6 +347,9 @@
this._buildPipeline();
}

+ /**
+ * Specifies if the motion blur effect is enabled
+ */
@serialize()
public get MotionBlurEnabled(): boolean {
return this._motionBlurEnabled;
@@ -206,6 +364,27 @@
this._buildPipeline();
}
+ /**
+ * Specifies if anti-aliasing is enabled
+ */
+ @serialize()
+ public get fxaaEnabled(): boolean {
+ return this._fxaaEnabled;
+ }
+
+ public set fxaaEnabled(enabled: boolean) {
+ if (this._fxaaEnabled === enabled) {
+ return;
+ }
+
+ this._fxaaEnabled = enabled;
+ this._buildPipeline();
+ }
+
+ /**
+ * Specifies the number of steps used to calculate the volumetric lights
+ * Typically in the interval [50, 200]
+ */
@serialize()
public get volumetricLightStepsCount(): number {
return this._volumetricLightStepsCount;
@@ -219,6 +398,10 @@
this._volumetricLightStepsCount = count;
}
+ /**
+ * Specifies the number of samples used for the motion blur effect
+ * Typically in the interval [16, 64]
+ */
@serialize()
public get motionBlurSamples(): number {
return this._motionBlurSamples;
@@ -233,6 +416,23 @@
}

/**
+ * Specifies the MSAA sample count; setting this to 4 will provide 4x anti-aliasing. (default: 1)
+ */
+ @serialize()
+ public get samples(): number {
+ return this._samples;
+ }
+
+ public set samples(sampleCount: number) {
+ if (this._samples === sampleCount) {
+ return;
+ }
+
+ this._samples = sampleCount;
+ this._buildPipeline();
+ }
+
+ /**
* @constructor
* @param {string} name - The rendering pipeline name
* @param {BABYLON.Scene} scene - The scene linked to this pipeline
@@ -340,9 +540,19 @@
this._createMotionBlurPostProcess(scene, ratio);
}

+ if (this._fxaaEnabled) {
+ // Create fxaa post-process
+ this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
+ this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true));
+ }
+
if (this._cameras !== null) {
this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
}
+
+ if (!this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1){
+ BABYLON.Tools.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
+ }
}

// Down Sample X4 Post-Processs
@@ -780,6 +990,8 @@

if (this.motionBlurPostProcess) { this.motionBlurPostProcess.dispose(camera); }

+ if (this.fxaaPostProcess) { this.fxaaPostProcess.dispose(camera); }
+
for (var j = 0; j < this.blurHPostProcesses.length; j++) {
this.blurHPostProcesses[j].dispose(camera);
}
@@ -806,6 +1018,7 @@
this.hdrFinalPostProcess = null;
this.depthOfFieldPostProcess = null;
this.motionBlurPostProcess = null;
+ this.fxaaPostProcess = null;

this.luminanceDownSamplePostProcesses = [];
this.blurHPostProcesses = [];
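Usage note: a minimal sketch of how the options added in this diff could be driven from application code. It assumes the usual StandardRenderingPipeline constructor signature (name, scene, ratio, originalPostProcess, cameras) and that "scene" and "camera" already exist in the host application; it is illustrative only and not part of the change.

    // Hypothetical setup; scene and camera come from the host application.
    var pipeline = new BABYLON.StandardRenderingPipeline("standard", scene, 1.0, null, [camera]);

    // New in this change: FXAA post-process and MSAA sample count.
    pipeline.fxaaEnabled = true; // rebuilds the pipeline with the FxaaPostProcess appended
    pipeline.samples = 4;        // 4x MSAA; logs a warning when WebGL 2 is unavailable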