// standardRenderingPipeline.ts
  1. import { Nullable } from "../../../types";
  2. import { serialize, serializeAsTexture, SerializationHelper } from "../../../Misc/decorators";
  3. import { IAnimatable } from "../../../Misc/tools";
  4. import { Logger } from "../../../Misc/logger";
  5. import { Vector2, Vector3, Matrix, Vector4 } from "../../../Maths/math";
  6. import { Scalar } from "../../../Maths/math.scalar";
  7. import { Camera } from "../../../Cameras/camera";
  8. import { Effect } from "../../../Materials/effect";
  9. import { Texture } from "../../../Materials/Textures/texture";
  10. import { PostProcess } from "../../../PostProcesses/postProcess";
  11. import { PostProcessRenderPipeline } from "../../../PostProcesses/RenderPipeline/postProcessRenderPipeline";
  12. import { PostProcessRenderEffect } from "../../../PostProcesses/RenderPipeline/postProcessRenderEffect";
  13. import { BlurPostProcess } from "../../../PostProcesses/blurPostProcess";
  14. import { FxaaPostProcess } from "../../../PostProcesses/fxaaPostProcess";
  15. import { IDisposable } from "../../../scene";
  16. import { SpotLight } from "../../../Lights/spotLight";
  17. import { DirectionalLight } from "../../../Lights/directionalLight";
  18. import { GeometryBufferRenderer } from "../../../Rendering/geometryBufferRenderer";
  19. import { Scene } from "../../../scene";
  20. import { Animation } from "../../../Animations/animation";
  21. import { Constants } from "../../../Engines/constants";
  22. import { _TypeStore } from '../../../Misc/typeStore';
  23. import "../../../Shaders/standard.fragment";
/**
 * Standard rendering pipeline
 * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
 * @see https://doc.babylonjs.com/how_to/using_standard_rendering_pipeline
 */
  29. export class StandardRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
    /**
     * Public members
     */
    // Post-processes
    /**
     * Post-process which contains the original scene color before the pipeline applies all the effects
     */
    public originalPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to down scale an image x4
     */
    public downSampleX4PostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to calculate the illuminated surfaces controlled by a threshold
     */
    public brightPassPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process array storing all the horizontal blur post-processes used by the pipeline
     */
    public blurHPostProcesses: PostProcess[] = [];
    /**
     * Post-process array storing all the vertical blur post-processes used by the pipeline
     */
    public blurVPostProcesses: PostProcess[] = [];
    /**
     * Post-process used to add colors of 2 textures (typically brightness + real scene color)
     */
    public textureAdderPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create volumetric lighting effect
     */
    public volumetricLightPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to smooth the previous volumetric light post-process on the X axis
     */
    public volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess> = null;
    /**
     * Post-process used to smooth the previous volumetric light post-process on the Y axis
     */
    public volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess> = null;
    /**
     * Post-process used to merge the volumetric light effect and the real scene color
     * NOTE(review): the identifier is missing a trailing "s" ("PostProces"); kept as-is because it is public API.
     */
    public volumetricLightMergePostProces: Nullable<PostProcess> = null;
    /**
     * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose)
     */
    public volumetricLightFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Base post-process used to calculate the average luminance of the final image for HDR
     */
    public luminancePostProcess: Nullable<PostProcess> = null;
    /**
     * Post-processes used to create down sample post-processes in order to get
     * the average luminance of the final image for HDR
     * Array of length "StandardRenderingPipeline.LuminanceSteps"
     */
    public luminanceDownSamplePostProcesses: PostProcess[] = [];
    /**
     * Post-process used to create a HDR effect (light adaptation)
     */
    public hdrPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to store the final texture adder post-process (attach/detach for debug purpose)
     */
    public textureAdderFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to store the final lens flare post-process (attach/detach for debug purpose)
     */
    public lensFlareFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to merge the final HDR post-process and the real scene color
     */
    public hdrFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create a lens flare effect
     */
    public lensFlarePostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process that merges the result of the lens flare post-process and the real scene color
     */
    public lensFlareComposePostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create a motion blur effect
     */
    public motionBlurPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create a depth of field effect
     */
    public depthOfFieldPostProcess: Nullable<PostProcess> = null;
    /**
     * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
     */
    public fxaaPostProcess: Nullable<FxaaPostProcess> = null;
    // Values
    /**
     * Represents the brightness threshold in order to configure the illuminated surfaces
     */
    @serialize()
    public brightThreshold: number = 1.0;
    /**
     * Configures the blur intensity used for overexposed or highlighted surfaces (light halo)
     */
    @serialize()
    public blurWidth: number = 512.0;
    /**
     * Sets if the blur for highlighted surfaces must be only horizontal
     */
    @serialize()
    public horizontalBlur: boolean = false;
    /**
     * Sets the overall exposure used by the pipeline
     */
    @serialize()
    public exposure: number = 1.0;
    /**
     * Texture used typically to simulate "dirty" on camera lens
     */
    @serializeAsTexture("lensTexture")
    public lensTexture: Nullable<Texture> = null;
    /**
     * Represents the offset coefficient based on Rayleigh principle. Typically in interval [-0.2, 0.2]
     */
    @serialize()
    public volumetricLightCoefficient: number = 0.2;
    /**
     * The overall power of volumetric lights, typically in interval [0, 10] maximum
     */
    @serialize()
    public volumetricLightPower: number = 4.0;
    /**
     * Used to set the blur intensity to smooth the volumetric lights
     */
    @serialize()
    public volumetricLightBlurScale: number = 64.0;
    /**
     * Light (spot or directional) used to generate the volumetric lights rays
     * The source light must have a shadow generator so the pipeline can get its
     * depth map
     */
    public sourceLight: Nullable<SpotLight | DirectionalLight> = null;
    /**
     * For eye adaptation, represents the minimum luminance the eye can see
     */
    @serialize()
    public hdrMinimumLuminance: number = 1.0;
    /**
     * For eye adaptation, represents the decrease luminance speed
     */
    @serialize()
    public hdrDecreaseRate: number = 0.5;
    /**
     * For eye adaptation, represents the increase luminance speed
     */
    @serialize()
    public hdrIncreaseRate: number = 0.5;
    /**
     * Lens color texture used by the lens flare effect. Mandatory if lens flare effect enabled
     */
    @serializeAsTexture("lensColorTexture")
    public lensColorTexture: Nullable<Texture> = null;
    /**
     * The overall strength for the lens flare effect
     */
    @serialize()
    public lensFlareStrength: number = 20.0;
    /**
     * Dispersion coefficient for lens flare ghosts
     */
    @serialize()
    public lensFlareGhostDispersal: number = 1.4;
    /**
     * Main lens flare halo width
     */
    @serialize()
    public lensFlareHaloWidth: number = 0.7;
    /**
     * Based on the lens distortion effect, defines how much the lens flare result
     * is distorted
     */
    @serialize()
    public lensFlareDistortionStrength: number = 16.0;
    /**
     * Lens star texture must be used to simulate rays on the flares and is available
     * in the documentation
     */
    @serializeAsTexture("lensStarTexture")
    public lensStarTexture: Nullable<Texture> = null;
    /**
     * As the "lensTexture" (can be the same texture or different), it is used to apply the lens
     * flare effect by taking account of the dirt texture
     */
    @serializeAsTexture("lensFlareDirtTexture")
    public lensFlareDirtTexture: Nullable<Texture> = null;
    /**
     * Represents the focal length for the depth of field effect
     */
    @serialize()
    public depthOfFieldDistance: number = 10.0;
    /**
     * Represents the blur intensity for the blurred part of the depth of field effect
     */
    @serialize()
    public depthOfFieldBlurWidth: number = 64.0;
    /**
     * For motion blur, defines how much the image is blurred by the movement
     */
    @serialize()
    public motionStrength: number = 1.0;
    /**
     * List of animations for the pipeline (IAnimatable implementation)
     */
    public animations: Animation[] = [];
    /**
     * Private members
     */
    private _scene: Scene;
    // Post-process currently feeding the depth-of-field pass; updated as effects are chained in _buildPipeline.
    private _currentDepthOfFieldSource: Nullable<PostProcess> = null;
    // Custom original color post-process supplied to the constructor (may be null).
    private _basePostProcess: Nullable<PostProcess>;
    // Current luminance value for HDR eye adaptation — presumably updated each frame by the HDR pass (not visible in this chunk).
    private _hdrCurrentLuminance: number = 1.0;
    // TEXTURETYPE_FLOAT when float render targets are supported, TEXTURETYPE_HALF_FLOAT otherwise (set in constructor).
    private _floatTextureType: number;
    @serialize()
    private _ratio: number;
    // Getters and setters
    private _bloomEnabled: boolean = false;
    private _depthOfFieldEnabled: boolean = false;
    private _vlsEnabled: boolean = false;
    private _lensFlareEnabled: boolean = false;
    private _hdrEnabled: boolean = false;
    private _motionBlurEnabled: boolean = false;
    private _fxaaEnabled: boolean = false;
    private _motionBlurSamples: number = 64.0;
    private _volumetricLightStepsCount: number = 50.0;
    private _samples: number = 1;
  264. /**
  265. * @ignore
  266. * Specifies if the bloom pipeline is enabled
  267. */
  268. @serialize()
  269. public get BloomEnabled(): boolean {
  270. return this._bloomEnabled;
  271. }
  272. public set BloomEnabled(enabled: boolean) {
  273. if (this._bloomEnabled === enabled) {
  274. return;
  275. }
  276. this._bloomEnabled = enabled;
  277. this._buildPipeline();
  278. }
  279. /**
  280. * @ignore
  281. * Specifies if the depth of field pipeline is enabed
  282. */
  283. @serialize()
  284. public get DepthOfFieldEnabled(): boolean {
  285. return this._depthOfFieldEnabled;
  286. }
  287. public set DepthOfFieldEnabled(enabled: boolean) {
  288. if (this._depthOfFieldEnabled === enabled) {
  289. return;
  290. }
  291. this._depthOfFieldEnabled = enabled;
  292. this._buildPipeline();
  293. }
  294. /**
  295. * @ignore
  296. * Specifies if the lens flare pipeline is enabed
  297. */
  298. @serialize()
  299. public get LensFlareEnabled(): boolean {
  300. return this._lensFlareEnabled;
  301. }
  302. public set LensFlareEnabled(enabled: boolean) {
  303. if (this._lensFlareEnabled === enabled) {
  304. return;
  305. }
  306. this._lensFlareEnabled = enabled;
  307. this._buildPipeline();
  308. }
  309. /**
  310. * @ignore
  311. * Specifies if the HDR pipeline is enabled
  312. */
  313. @serialize()
  314. public get HDREnabled(): boolean {
  315. return this._hdrEnabled;
  316. }
  317. public set HDREnabled(enabled: boolean) {
  318. if (this._hdrEnabled === enabled) {
  319. return;
  320. }
  321. this._hdrEnabled = enabled;
  322. this._buildPipeline();
  323. }
  324. /**
  325. * @ignore
  326. * Specifies if the volumetric lights scattering effect is enabled
  327. */
  328. @serialize()
  329. public get VLSEnabled(): boolean {
  330. return this._vlsEnabled;
  331. }
  332. public set VLSEnabled(enabled) {
  333. if (this._vlsEnabled === enabled) {
  334. return;
  335. }
  336. if (enabled) {
  337. var geometry = this._scene.enableGeometryBufferRenderer();
  338. if (!geometry) {
  339. Logger.Warn("Geometry renderer is not supported, cannot create volumetric lights in Standard Rendering Pipeline");
  340. return;
  341. }
  342. }
  343. this._vlsEnabled = enabled;
  344. this._buildPipeline();
  345. }
  346. /**
  347. * @ignore
  348. * Specifies if the motion blur effect is enabled
  349. */
  350. @serialize()
  351. public get MotionBlurEnabled(): boolean {
  352. return this._motionBlurEnabled;
  353. }
  354. public set MotionBlurEnabled(enabled: boolean) {
  355. if (this._motionBlurEnabled === enabled) {
  356. return;
  357. }
  358. this._motionBlurEnabled = enabled;
  359. this._buildPipeline();
  360. }
  361. /**
  362. * Specifies if anti-aliasing is enabled
  363. */
  364. @serialize()
  365. public get fxaaEnabled(): boolean {
  366. return this._fxaaEnabled;
  367. }
  368. public set fxaaEnabled(enabled: boolean) {
  369. if (this._fxaaEnabled === enabled) {
  370. return;
  371. }
  372. this._fxaaEnabled = enabled;
  373. this._buildPipeline();
  374. }
  375. /**
  376. * Specifies the number of steps used to calculate the volumetric lights
  377. * Typically in interval [50, 200]
  378. */
  379. @serialize()
  380. public get volumetricLightStepsCount(): number {
  381. return this._volumetricLightStepsCount;
  382. }
  383. public set volumetricLightStepsCount(count: number) {
  384. if (this.volumetricLightPostProcess) {
  385. this.volumetricLightPostProcess.updateEffect("#define VLS\n#define NB_STEPS " + count.toFixed(1));
  386. }
  387. this._volumetricLightStepsCount = count;
  388. }
  389. /**
  390. * Specifies the number of samples used for the motion blur effect
  391. * Typically in interval [16, 64]
  392. */
  393. @serialize()
  394. public get motionBlurSamples(): number {
  395. return this._motionBlurSamples;
  396. }
  397. public set motionBlurSamples(samples: number) {
  398. if (this.motionBlurPostProcess) {
  399. this.motionBlurPostProcess.updateEffect("#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + samples.toFixed(1));
  400. }
  401. this._motionBlurSamples = samples;
  402. }
  403. /**
  404. * Specifies MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
  405. */
  406. @serialize()
  407. public get samples(): number {
  408. return this._samples;
  409. }
  410. public set samples(sampleCount: number) {
  411. if (this._samples === sampleCount) {
  412. return;
  413. }
  414. this._samples = sampleCount;
  415. this._buildPipeline();
  416. }
  417. /**
  418. * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
  419. * @constructor
  420. * @param name The rendering pipeline name
  421. * @param scene The scene linked to this pipeline
  422. * @param ratio The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5)
  423. * @param originalPostProcess the custom original color post-process. Must be "reusable". Can be null.
  424. * @param cameras The array of cameras that the rendering pipeline will be attached to
  425. */
  426. constructor(name: string, scene: Scene, ratio: number, originalPostProcess: Nullable<PostProcess> = null, cameras?: Camera[]) {
  427. super(scene.getEngine(), name);
  428. this._cameras = cameras || [];
  429. // Initialize
  430. this._scene = scene;
  431. this._basePostProcess = originalPostProcess;
  432. this._ratio = ratio;
  433. // Misc
  434. this._floatTextureType = scene.getEngine().getCaps().textureFloatRender ? Constants.TEXTURETYPE_FLOAT : Constants.TEXTURETYPE_HALF_FLOAT;
  435. // Finish
  436. scene.postProcessRenderPipelineManager.addPipeline(this);
  437. this._buildPipeline();
  438. }
    /**
     * Rebuilds the post-process chain according to the enabled effect flags.
     * Disposes the previous chain, then re-creates and re-attaches every
     * post-process in order: pass, bloom, volumetric lights, lens flare,
     * HDR, depth-of-field, motion blur and FXAA.
     * Called from the constructor and from every effect toggle setter.
     */
    private _buildPipeline(): void {
        var ratio = this._ratio;
        var scene = this._scene;
        this._disposePostProcesses();
        this._reset();
        // Create pass post-process
        if (!this._basePostProcess) {
            this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Constants.TEXTURE_BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType);
            this.originalPostProcess.onApply = () => {
                // The plain pass is the depth-of-field input until a later effect overrides it.
                this._currentDepthOfFieldSource = this.originalPostProcess;
            };
        }
        else {
            this.originalPostProcess = this._basePostProcess;
        }
        // Attach the pass as a separate effect only when at least one consumer effect is enabled.
        if (this._bloomEnabled || this._vlsEnabled || this._lensFlareEnabled || this._depthOfFieldEnabled || this._motionBlurEnabled) {
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => { return this.originalPostProcess; }, true));
        }
        this._currentDepthOfFieldSource = this.originalPostProcess;
        if (this._bloomEnabled) {
            // Create down sample X4 post-process
            this._createDownSampleX4PostProcess(scene, ratio / 2);
            // Create bright pass post-process
            this._createBrightPassPostProcess(scene, ratio / 2);
            // Create gaussian blur post-processes (down sampling blurs)
            this._createBlurPostProcesses(scene, ratio / 4, 1);
            // Create texture adder post-process
            this._createTextureAdderPostProcess(scene, ratio);
            // Create depth-of-field source post-process
            this.textureAdderFinalPostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBaseDepthOfFieldSource", () => { return this.textureAdderFinalPostProcess; }, true));
        }
        if (this._vlsEnabled) {
            // Create volumetric light
            this._createVolumetricLightPostProcess(scene, ratio);
            // Create volumetric light final post-process
            this.volumetricLightFinalPostProcess = new PostProcess("HDRVLSFinal", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSFinal", () => { return this.volumetricLightFinalPostProcess; }, true));
        }
        if (this._lensFlareEnabled) {
            // Create lens flare post-process
            this._createLensFlarePostProcess(scene, ratio);
            // Create depth-of-field source post-process post lens-flare and disable it now
            this.lensFlareFinalPostProcess = new PostProcess("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => { return this.lensFlareFinalPostProcess; }, true));
        }
        if (this._hdrEnabled) {
            // Create luminance
            this._createLuminancePostProcesses(scene, this._floatTextureType);
            // Create HDR
            this._createHdrPostProcess(scene, ratio);
            // Create depth-of-field source post-process post hdr and disable it now
            this.hdrFinalPostProcess = new PostProcess("HDRPostHDReDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostHDReDepthOfFieldSource", () => { return this.hdrFinalPostProcess; }, true));
        }
        if (this._depthOfFieldEnabled) {
            // Create gaussian blur used by depth-of-field
            this._createBlurPostProcesses(scene, ratio / 2, 3, "depthOfFieldBlurWidth");
            // Create depth-of-field post-process
            this._createDepthOfFieldPostProcess(scene, ratio);
        }
        if (this._motionBlurEnabled) {
            // Create motion blur post-process
            this._createMotionBlurPostProcess(scene, ratio);
        }
        if (this._fxaaEnabled) {
            // Create fxaa post-process
            this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true));
        }
        // Re-attach cameras last so they see the freshly built chain.
        if (this._cameras !== null) {
            this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
        }
        if (!this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1) {
            Logger.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
        }
    }
  516. // Down Sample X4 Post-Processs
  517. private _createDownSampleX4PostProcess(scene: Scene, ratio: number): void {
  518. var downSampleX4Offsets = new Array<number>(32);
  519. this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", Constants.TEXTURETYPE_UNSIGNED_INT);
  520. this.downSampleX4PostProcess.onApply = (effect: Effect) => {
  521. var id = 0;
  522. let width = (<PostProcess>this.downSampleX4PostProcess).width;
  523. let height = (<PostProcess>this.downSampleX4PostProcess).height;
  524. for (var i = -2; i < 2; i++) {
  525. for (var j = -2; j < 2; j++) {
  526. downSampleX4Offsets[id] = (i + 0.5) * (1.0 / width);
  527. downSampleX4Offsets[id + 1] = (j + 0.5) * (1.0 / height);
  528. id += 2;
  529. }
  530. }
  531. effect.setArray2("dsOffsets", downSampleX4Offsets);
  532. };
  533. // Add to pipeline
  534. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDownSampleX4", () => { return this.downSampleX4PostProcess; }, true));
  535. }
  536. // Brightpass Post-Process
  537. private _createBrightPassPostProcess(scene: Scene, ratio: number): void {
  538. var brightOffsets = new Array<number>(8);
  539. this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", Constants.TEXTURETYPE_UNSIGNED_INT);
  540. this.brightPassPostProcess.onApply = (effect: Effect) => {
  541. var sU = (1.0 / (<PostProcess>this.brightPassPostProcess).width);
  542. var sV = (1.0 / (<PostProcess>this.brightPassPostProcess).height);
  543. brightOffsets[0] = -0.5 * sU;
  544. brightOffsets[1] = 0.5 * sV;
  545. brightOffsets[2] = 0.5 * sU;
  546. brightOffsets[3] = 0.5 * sV;
  547. brightOffsets[4] = -0.5 * sU;
  548. brightOffsets[5] = -0.5 * sV;
  549. brightOffsets[6] = 0.5 * sU;
  550. brightOffsets[7] = -0.5 * sV;
  551. effect.setArray2("dsOffsets", brightOffsets);
  552. effect.setFloat("brightThreshold", this.brightThreshold);
  553. };
  554. // Add to pipeline
  555. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBrightPass", () => { return this.brightPassPostProcess; }, true));
  556. }
  557. // Create blur H&V post-processes
  558. private _createBlurPostProcesses(scene: Scene, ratio: number, indice: number, blurWidthKey: string = "blurWidth"): void {
  559. var engine = scene.getEngine();
  560. var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
  561. var blurY = new BlurPostProcess("HDRBlurV" + "_" + indice, new Vector2(0, 1), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
  562. blurX.onActivateObservable.add(() => {
  563. let dw = blurX.width / engine.getRenderWidth();
  564. blurX.kernel = (<any>this)[blurWidthKey] * dw;
  565. });
  566. blurY.onActivateObservable.add(() => {
  567. let dw = blurY.height / engine.getRenderHeight();
  568. blurY.kernel = this.horizontalBlur ? 64 * dw : (<any>this)[blurWidthKey] * dw;
  569. });
  570. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurH" + indice, () => { return blurX; }, true));
  571. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurV" + indice, () => { return blurY; }, true));
  572. this.blurHPostProcesses.push(blurX);
  573. this.blurVPostProcesses.push(blurY);
  574. }
  575. // Create texture adder post-process
  576. private _createTextureAdderPostProcess(scene: Scene, ratio: number): void {
  577. this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", Constants.TEXTURETYPE_UNSIGNED_INT);
  578. this.textureAdderPostProcess.onApply = (effect: Effect) => {
  579. effect.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? this._currentDepthOfFieldSource : this.originalPostProcess);
  580. effect.setTexture("lensSampler", this.lensTexture);
  581. effect.setFloat("exposure", this.exposure);
  582. this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess;
  583. };
  584. // Add to pipeline
  585. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true));
  586. }
    /**
     * Creates the volumetric light scattering chain: the base VLS pass
     * (ray-marched against the source light's shadow map), the smoothing
     * blurs, and the merge pass that composites the result over the scene.
     */
    private _createVolumetricLightPostProcess(scene: Scene, ratio: number): void {
        var geometryRenderer = <GeometryBufferRenderer>scene.enableGeometryBufferRenderer();
        // The VLS shader needs world positions from the G-buffer.
        geometryRenderer.enablePosition = true;
        var geometry = geometryRenderer.getGBuffer();
        // Base post-process
        this.volumetricLightPostProcess = new PostProcess("HDRVLS", "standard",
            ["shadowViewProjection", "cameraPosition", "sunDirection", "sunColor", "scatteringCoefficient", "scatteringPower", "depthValues"],
            ["shadowMapSampler", "positionSampler"],
            ratio / 8,
            null,
            Texture.BILINEAR_SAMPLINGMODE,
            scene.getEngine(),
            false, "#define VLS\n#define NB_STEPS " + this._volumetricLightStepsCount.toFixed(1));
        // Reused across frames to avoid per-apply allocations.
        var depthValues = Vector2.Zero();
        this.volumetricLightPostProcess.onApply = (effect: Effect) => {
            // Uniforms are only bound when a source light with a shadow generator and an active camera exist;
            // otherwise the pass runs with stale/default uniforms.
            if (this.sourceLight && this.sourceLight.getShadowGenerator() && this._scene.activeCamera) {
                var generator = this.sourceLight.getShadowGenerator()!;
                effect.setTexture("shadowMapSampler", generator.getShadowMap());
                // textures[2] — presumably the position texture enabled above; verify against GeometryBufferRenderer's G-buffer layout.
                effect.setTexture("positionSampler", geometry.textures[2]);
                effect.setColor3("sunColor", this.sourceLight.diffuse);
                effect.setVector3("sunDirection", this.sourceLight.getShadowDirection());
                effect.setVector3("cameraPosition", this._scene.activeCamera.globalPosition);
                effect.setMatrix("shadowViewProjection", generator.getTransformMatrix());
                effect.setFloat("scatteringCoefficient", this.volumetricLightCoefficient);
                effect.setFloat("scatteringPower", this.volumetricLightPower);
                // Near/far depth range of the light as seen from the active camera.
                depthValues.x = this.sourceLight.getDepthMinZ(this._scene.activeCamera);
                depthValues.y = this.sourceLight.getDepthMaxZ(this._scene.activeCamera);
                effect.setVector2("depthValues", depthValues);
            }
        };
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLS", () => { return this.volumetricLightPostProcess; }, true));
        // Smooth
        this._createBlurPostProcesses(scene, ratio / 4, 0, "volumetricLightBlurScale");
        // Merge
        this.volumetricLightMergePostProces = new PostProcess("HDRVLSMerge", "standard", [], ["originalSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define VLSMERGE");
        this.volumetricLightMergePostProces.onApply = (effect: Effect) => {
            effect.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? this.textureAdderFinalPostProcess : this.originalPostProcess);
            this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess;
        };
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSMerge", () => { return this.volumetricLightMergePostProces; }, true));
    }
    /**
     * Builds the luminance measurement chain: one "HDRLuminance" pass rendered
     * into a 3^LuminanceSteps square target, followed by LuminanceSteps
     * 3x down-sample passes (3^(steps-1) ... 3^0). After the final 1x1 pass
     * renders, its pixel is read back on the CPU to update
     * this._hdrCurrentLuminance every frame.
     * @param scene defines the scene the post-processes are rendered in
     * @param textureType defines the texture type of the luminance render targets
     */
    private _createLuminancePostProcesses(scene: Scene, textureType: number): void {
        // Create luminance
        var size = Math.pow(3, StandardRenderingPipeline.LuminanceSteps);
        this.luminancePostProcess = new PostProcess("HDRLuminance", "standard", ["lumOffsets"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LUMINANCE", textureType);
        var offsets: number[] = [];
        this.luminancePostProcess.onApply = (effect: Effect) => {
            // 2x2 half-texel tap offsets in UV space, recomputed every frame in
            // case the post-process has been resized.
            var sU = (1.0 / (<PostProcess>this.luminancePostProcess).width);
            var sV = (1.0 / (<PostProcess>this.luminancePostProcess).height);
            offsets[0] = -0.5 * sU;
            offsets[1] = 0.5 * sV;
            offsets[2] = 0.5 * sU;
            offsets[3] = 0.5 * sV;
            offsets[4] = -0.5 * sU;
            offsets[5] = -0.5 * sV;
            offsets[6] = 0.5 * sU;
            offsets[7] = -0.5 * sV;
            effect.setArray2("lumOffsets", offsets);
        };
        // Add to pipeline
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminance", () => { return this.luminancePostProcess; }, true));
        // Create down sample luminance passes, from 3^(steps-1) down to 1x1.
        for (var i = StandardRenderingPipeline.LuminanceSteps - 1; i >= 0; i--) {
            var size = Math.pow(3, i);
            var defines = "#define LUMINANCE_DOWN_SAMPLE\n";
            if (i === 0) {
                // The 1x1 pass packs the final luminance for the CPU read-back below.
                defines += "#define FINAL_DOWN_SAMPLER";
            }
            var postProcess = new PostProcess("HDRLuminanceDownSample" + i, "standard", ["dsOffsets", "halfDestPixelSize"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines, textureType);
            this.luminanceDownSamplePostProcesses.push(postProcess);
        }
        // Create callbacks and add effects.
        // "lastLuminance" points at the previous pass in the chain and is mutated
        // across frames: each onApply reads the previous pass's dimensions, then
        // advances the pointer; the last pass resets it so the next frame restarts
        // at the top of the chain. Statement order here is load-bearing.
        var lastLuminance: Nullable<PostProcess> = this.luminancePostProcess;
        this.luminanceDownSamplePostProcesses.forEach((pp, index) => {
            // 9 taps x 2 components = 18 offsets, reused across frames.
            var downSampleOffsets = new Array<number>(18);
            pp.onApply = (effect: Effect) => {
                if (!lastLuminance) {
                    return;
                }
                // 3x3 tap offsets relative to the source (previous) pass size.
                var id = 0;
                for (var x = -1; x < 2; x++) {
                    for (var y = -1; y < 2; y++) {
                        downSampleOffsets[id] = x / lastLuminance.width;
                        downSampleOffsets[id + 1] = y / lastLuminance.height;
                        id += 2;
                    }
                }
                effect.setArray2("dsOffsets", downSampleOffsets);
                effect.setFloat("halfDestPixelSize", 0.5 / lastLuminance.width);
                if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                    lastLuminance = this.luminancePostProcess;
                } else {
                    lastLuminance = pp;
                }
            };
            if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                // After the 1x1 pass renders, read its single pixel back and decode
                // the bit-packed luminance written under FINAL_DOWN_SAMPLER.
                pp.onAfterRender = () => {
                    var pixel = scene.getEngine().readPixels(0, 0, 1, 1);
                    var bit_shift = new Vector4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);
                    // NOTE(review): the /100.0 scale presumably matches the packing
                    // range used by the "standard" shader — verify against it.
                    this._hdrCurrentLuminance = (pixel[0] * bit_shift.x + pixel[1] * bit_shift.y + pixel[2] * bit_shift.z + pixel[3] * bit_shift.w) / 100.0;
                };
            }
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminanceDownSample" + index, () => { return pp; }, true));
        });
    }
    // Create HDR post-process
    /**
     * Creates the tone-mapping ("HDR") post-process that exposes the image using
     * the measured average luminance, with temporal adaptation driven by
     * hdrDecreaseRate / hdrIncreaseRate.
     * @param scene defines the scene the post-process belongs to
     * @param ratio defines the size ratio of the render target
     */
    private _createHdrPostProcess(scene: Scene, ratio: number): void {
        this.hdrPostProcess = new PostProcess("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define HDR", Constants.TEXTURETYPE_UNSIGNED_INT);
        // Adaptation state captured by the onApply closure; persists across frames.
        var outputLiminance = 1;
        var time = 0;
        var lastTime = 0;
        this.hdrPostProcess.onApply = (effect: Effect) => {
            effect.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource);
            time += scene.getEngine().getDeltaTime();
            if (outputLiminance < 0) {
                // NOTE(review): with the initial value 1 and the clamp below, this
                // branch is only reachable if hdrMinimumLuminance is negative —
                // presumably a re-initialization hook; confirm before touching.
                outputLiminance = this._hdrCurrentLuminance;
            } else {
                // NOTE(review): dt is negative (lastTime lags time), so the
                // "decrease" branch actually subtracts and vice versa — the
                // increase/decrease roles appear swapped; verify intent before
                // changing, as the rendered result depends on it.
                var dt = (lastTime - time) / 1000.0;
                if (this._hdrCurrentLuminance < outputLiminance + this.hdrDecreaseRate * dt) {
                    outputLiminance += this.hdrDecreaseRate * dt;
                }
                else if (this._hdrCurrentLuminance > outputLiminance - this.hdrIncreaseRate * dt) {
                    outputLiminance -= this.hdrIncreaseRate * dt;
                }
                else {
                    // Close enough to the target: snap to the measured luminance.
                    outputLiminance = this._hdrCurrentLuminance;
                }
            }
            outputLiminance = Scalar.Clamp(outputLiminance, this.hdrMinimumLuminance, 1e20);
            effect.setFloat("averageLuminance", outputLiminance);
            lastTime = time;
            // Downstream passes (e.g. depth of field) now read from the HDR output.
            this._currentDepthOfFieldSource = this.hdrFinalPostProcess;
        };
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDR", () => { return this.hdrPostProcess; }, true));
    }
  723. // Create lens flare post-process
  724. private _createLensFlarePostProcess(scene: Scene, ratio: number): void {
  725. this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", Constants.TEXTURETYPE_UNSIGNED_INT);
  726. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlare", () => { return this.lensFlarePostProcess; }, true));
  727. this._createBlurPostProcesses(scene, ratio / 4, 2);
  728. this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", Constants.TEXTURETYPE_UNSIGNED_INT);
  729. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlareCompose", () => { return this.lensFlareComposePostProcess; }, true));
  730. var resolution = new Vector2(0, 0);
  731. // Lens flare
  732. this.lensFlarePostProcess.onApply = (effect: Effect) => {
  733. effect.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess);
  734. effect.setTexture("lensColorSampler", this.lensColorTexture);
  735. effect.setFloat("strength", this.lensFlareStrength);
  736. effect.setFloat("ghostDispersal", this.lensFlareGhostDispersal);
  737. effect.setFloat("haloWidth", this.lensFlareHaloWidth);
  738. // Shift
  739. resolution.x = (<PostProcess>this.lensFlarePostProcess).width;
  740. resolution.y = (<PostProcess>this.lensFlarePostProcess).height;
  741. effect.setVector2("resolution", resolution);
  742. effect.setFloat("distortionStrength", this.lensFlareDistortionStrength);
  743. };
  744. // Compose
  745. var scaleBias1 = Matrix.FromValues(
  746. 2.0, 0.0, -1.0, 0.0,
  747. 0.0, 2.0, -1.0, 0.0,
  748. 0.0, 0.0, 1.0, 0.0,
  749. 0.0, 0.0, 0.0, 1.0
  750. );
  751. var scaleBias2 = Matrix.FromValues(
  752. 0.5, 0.0, 0.5, 0.0,
  753. 0.0, 0.5, 0.5, 0.0,
  754. 0.0, 0.0, 1.0, 0.0,
  755. 0.0, 0.0, 0.0, 1.0
  756. );
  757. this.lensFlareComposePostProcess.onApply = (effect: Effect) => {
  758. if (!this._scene.activeCamera) {
  759. return;
  760. }
  761. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  762. effect.setTexture("lensDirtSampler", this.lensFlareDirtTexture);
  763. effect.setTexture("lensStarSampler", this.lensStarTexture);
  764. // Lens start rotation matrix
  765. var camerax = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(0));
  766. var cameraz = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(2));
  767. var camRot = Vector3.Dot(camerax.toVector3(), new Vector3(1.0, 0.0, 0.0)) + Vector3.Dot(cameraz.toVector3(), new Vector3(0.0, 0.0, 1.0));
  768. camRot *= 4.0;
  769. var starRotation = Matrix.FromValues(
  770. Math.cos(camRot) * 0.5, -Math.sin(camRot), 0.0, 0.0,
  771. Math.sin(camRot), Math.cos(camRot) * 0.5, 0.0, 0.0,
  772. 0.0, 0.0, 1.0, 0.0,
  773. 0.0, 0.0, 0.0, 1.0
  774. );
  775. var lensStarMatrix = scaleBias2.multiply(starRotation).multiply(scaleBias1);
  776. effect.setMatrix("lensStarMatrix", lensStarMatrix);
  777. this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess;
  778. };
  779. }
  780. // Create depth-of-field post-process
  781. private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void {
  782. this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Constants.TEXTURETYPE_UNSIGNED_INT);
  783. this.depthOfFieldPostProcess.onApply = (effect: Effect) => {
  784. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  785. effect.setTexture("depthSampler", this._getDepthTexture());
  786. effect.setFloat("distance", this.depthOfFieldDistance);
  787. };
  788. // Add to pipeline
  789. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfField", () => { return this.depthOfFieldPostProcess; }, true));
  790. }
  791. // Create motion blur post-process
  792. private _createMotionBlurPostProcess(scene: Scene, ratio: number): void {
  793. this.motionBlurPostProcess = new PostProcess("HDRMotionBlur", "standard",
  794. ["inverseViewProjection", "prevViewProjection", "screenSize", "motionScale", "motionStrength"],
  795. ["depthSampler"],
  796. ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + this.motionBlurSamples.toFixed(1), Constants.TEXTURETYPE_UNSIGNED_INT);
  797. var motionScale: number = 0;
  798. var prevViewProjection = Matrix.Identity();
  799. var invViewProjection = Matrix.Identity();
  800. var viewProjection = Matrix.Identity();
  801. var screenSize = Vector2.Zero();
  802. this.motionBlurPostProcess.onApply = (effect: Effect) => {
  803. viewProjection = scene.getProjectionMatrix().multiply(scene.getViewMatrix());
  804. viewProjection.invertToRef(invViewProjection);
  805. effect.setMatrix("inverseViewProjection", invViewProjection);
  806. effect.setMatrix("prevViewProjection", prevViewProjection);
  807. prevViewProjection = viewProjection;
  808. screenSize.x = (<PostProcess>this.motionBlurPostProcess).width;
  809. screenSize.y = (<PostProcess>this.motionBlurPostProcess).height;
  810. effect.setVector2("screenSize", screenSize);
  811. motionScale = scene.getEngine().getFps() / 60.0;
  812. effect.setFloat("motionScale", motionScale);
  813. effect.setFloat("motionStrength", this.motionStrength);
  814. effect.setTexture("depthSampler", this._getDepthTexture());
  815. };
  816. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRMotionBlur", () => { return this.motionBlurPostProcess; }, true));
  817. }
  818. private _getDepthTexture(): Texture {
  819. if (this._scene.getEngine().getCaps().drawBuffersExtension) {
  820. let renderer = <GeometryBufferRenderer>this._scene.enableGeometryBufferRenderer();
  821. return renderer.getGBuffer().textures[0];
  822. }
  823. return this._scene.enableDepthRenderer().getDepthMap();
  824. }
  825. private _disposePostProcesses(): void {
  826. for (var i = 0; i < this._cameras.length; i++) {
  827. var camera = this._cameras[i];
  828. if (this.originalPostProcess) { this.originalPostProcess.dispose(camera); }
  829. if (this.downSampleX4PostProcess) { this.downSampleX4PostProcess.dispose(camera); }
  830. if (this.brightPassPostProcess) { this.brightPassPostProcess.dispose(camera); }
  831. if (this.textureAdderPostProcess) { this.textureAdderPostProcess.dispose(camera); }
  832. if (this.textureAdderFinalPostProcess) { this.textureAdderFinalPostProcess.dispose(camera); }
  833. if (this.volumetricLightPostProcess) { this.volumetricLightPostProcess.dispose(camera); }
  834. if (this.volumetricLightSmoothXPostProcess) { this.volumetricLightSmoothXPostProcess.dispose(camera); }
  835. if (this.volumetricLightSmoothYPostProcess) { this.volumetricLightSmoothYPostProcess.dispose(camera); }
  836. if (this.volumetricLightMergePostProces) { this.volumetricLightMergePostProces.dispose(camera); }
  837. if (this.volumetricLightFinalPostProcess) { this.volumetricLightFinalPostProcess.dispose(camera); }
  838. if (this.lensFlarePostProcess) { this.lensFlarePostProcess.dispose(camera); }
  839. if (this.lensFlareComposePostProcess) { this.lensFlareComposePostProcess.dispose(camera); }
  840. for (var j = 0; j < this.luminanceDownSamplePostProcesses.length; j++) {
  841. this.luminanceDownSamplePostProcesses[j].dispose(camera);
  842. }
  843. if (this.luminancePostProcess) { this.luminancePostProcess.dispose(camera); }
  844. if (this.hdrPostProcess) { this.hdrPostProcess.dispose(camera); }
  845. if (this.hdrFinalPostProcess) { this.hdrFinalPostProcess.dispose(camera); }
  846. if (this.depthOfFieldPostProcess) { this.depthOfFieldPostProcess.dispose(camera); }
  847. if (this.motionBlurPostProcess) { this.motionBlurPostProcess.dispose(camera); }
  848. if (this.fxaaPostProcess) { this.fxaaPostProcess.dispose(camera); }
  849. for (var j = 0; j < this.blurHPostProcesses.length; j++) {
  850. this.blurHPostProcesses[j].dispose(camera);
  851. }
  852. for (var j = 0; j < this.blurVPostProcesses.length; j++) {
  853. this.blurVPostProcesses[j].dispose(camera);
  854. }
  855. }
  856. this.originalPostProcess = null;
  857. this.downSampleX4PostProcess = null;
  858. this.brightPassPostProcess = null;
  859. this.textureAdderPostProcess = null;
  860. this.textureAdderFinalPostProcess = null;
  861. this.volumetricLightPostProcess = null;
  862. this.volumetricLightSmoothXPostProcess = null;
  863. this.volumetricLightSmoothYPostProcess = null;
  864. this.volumetricLightMergePostProces = null;
  865. this.volumetricLightFinalPostProcess = null;
  866. this.lensFlarePostProcess = null;
  867. this.lensFlareComposePostProcess = null;
  868. this.luminancePostProcess = null;
  869. this.hdrPostProcess = null;
  870. this.hdrFinalPostProcess = null;
  871. this.depthOfFieldPostProcess = null;
  872. this.motionBlurPostProcess = null;
  873. this.fxaaPostProcess = null;
  874. this.luminanceDownSamplePostProcesses = [];
  875. this.blurHPostProcesses = [];
  876. this.blurVPostProcesses = [];
  877. }
    /**
     * Dispose of the pipeline and stop all post processes: releases every
     * post-process from the attached cameras, detaches those cameras from the
     * render pipeline manager, then runs the base-class disposal.
     */
    public dispose(): void {
        this._disposePostProcesses();
        this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
        super.dispose();
    }
  886. /**
  887. * Serialize the rendering pipeline (Used when exporting)
  888. * @returns the serialized object
  889. */
  890. public serialize(): any {
  891. var serializationObject = SerializationHelper.Serialize(this);
  892. if (this.sourceLight) {
  893. serializationObject.sourceLightId = this.sourceLight.id;
  894. }
  895. serializationObject.customType = "StandardRenderingPipeline";
  896. return serializationObject;
  897. }
  898. /**
  899. * Parse the serialized pipeline
  900. * @param source Source pipeline.
  901. * @param scene The scene to load the pipeline to.
  902. * @param rootUrl The URL of the serialized pipeline.
  903. * @returns An instantiated pipeline from the serialized object.
  904. */
  905. public static Parse(source: any, scene: Scene, rootUrl: string): StandardRenderingPipeline {
  906. var p = SerializationHelper.Parse(() => new StandardRenderingPipeline(source._name, scene, source._ratio), source, scene, rootUrl);
  907. if (source.sourceLightId) {
  908. p.sourceLight = <SpotLight | DirectionalLight> scene.getLightByID(source.sourceLightId);
  909. }
  910. return p;
  911. }
    /**
     * Luminance steps: number of 3x down-sample passes in the luminance chain.
     * The chain renders at 3^LuminanceSteps pixels square and is reduced step
     * by step down to a single pixel.
     */
    public static LuminanceSteps: number = 6;
  916. }
// Register the pipeline in the type store so deserialization can map the
// "StandardRenderingPipeline" customType back to this constructor.
_TypeStore.RegisteredTypes["BABYLON.StandardRenderingPipeline"] = StandardRenderingPipeline;