// babylon.standardRenderingPipeline.ts
  1. module BABYLON {
  2. /**
  3. * Standard rendering pipeline
  4. * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
  5. * @see https://doc.babylonjs.com/how_to/using_standard_rendering_pipeline
  6. */
  7. export class StandardRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
  8. /**
  9. * Public members
  10. */
  11. // Post-processes
  12. /**
  13. * Post-process which contains the original scene color before the pipeline applies all the effects
  14. */
  15. public originalPostProcess: Nullable<PostProcess>;
  16. /**
  17. * Post-process used to down scale an image x4
  18. */
  19. public downSampleX4PostProcess: Nullable<PostProcess> = null;
  20. /**
  21. * Post-process used to calculate the illuminated surfaces controlled by a threshold
  22. */
  23. public brightPassPostProcess: Nullable<PostProcess> = null;
  24. /**
  25. * Post-process array storing all the horizontal blur post-processes used by the pipeline
  26. */
  27. public blurHPostProcesses: PostProcess[] = [];
  28. /**
  29. * Post-process array storing all the vertical blur post-processes used by the pipeline
  30. */
  31. public blurVPostProcesses: PostProcess[] = [];
  32. /**
  33. * Post-process used to add colors of 2 textures (typically brightness + real scene color)
  34. */
  35. public textureAdderPostProcess: Nullable<PostProcess> = null;
  36. /**
  37. * Post-process used to create volumetric lighting effect
  38. */
  39. public volumetricLightPostProcess: Nullable<PostProcess> = null;
  40. /**
  41. * Post-process used to smooth the previous volumetric light post-process on the X axis
  42. */
  43. public volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess> = null;
  44. /**
  45. * Post-process used to smooth the previous volumetric light post-process on the Y axis
  46. */
  47. public volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess> = null;
  48. /**
  49. * Post-process used to merge the volumetric light effect and the real scene color
  50. */
  51. public volumetricLightMergePostProces: Nullable<PostProcess> = null;
  52. /**
  53. * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose)
  54. */
  55. public volumetricLightFinalPostProcess: Nullable<PostProcess> = null;
  56. /**
  57. * Base post-process used to calculate the average luminance of the final image for HDR
  58. */
  59. public luminancePostProcess: Nullable<PostProcess> = null;
  60. /**
  61. * Post-processes used to create down sample post-processes in order to get
  62. * the average luminance of the final image for HDR
  63. * Array of length "StandardRenderingPipeline.LuminanceSteps"
  64. */
  65. public luminanceDownSamplePostProcesses: PostProcess[] = [];
  66. /**
  67. * Post-process used to create a HDR effect (light adaptation)
  68. */
  69. public hdrPostProcess: Nullable<PostProcess> = null;
  70. /**
  71. * Post-process used to store the final texture adder post-process (attach/detach for debug purpose)
  72. */
  73. public textureAdderFinalPostProcess: Nullable<PostProcess> = null;
  74. /**
  75. * Post-process used to store the final lens flare post-process (attach/detach for debug purpose)
  76. */
  77. public lensFlareFinalPostProcess: Nullable<PostProcess> = null;
  78. /**
  79. * Post-process used to merge the final HDR post-process and the real scene color
  80. */
  81. public hdrFinalPostProcess: Nullable<PostProcess> = null;
  82. /**
  83. * Post-process used to create a lens flare effect
  84. */
  85. public lensFlarePostProcess: Nullable<PostProcess> = null;
  86. /**
  87. * Post-process that merges the result of the lens flare post-process and the real scene color
  88. */
  89. public lensFlareComposePostProcess: Nullable<PostProcess> = null;
  90. /**
  91. * Post-process used to create a motion blur effect
  92. */
  93. public motionBlurPostProcess: Nullable<PostProcess> = null;
  94. /**
  95. * Post-process used to create a depth of field effect
  96. */
  97. public depthOfFieldPostProcess: Nullable<PostProcess> = null;
  98. /**
  99. * The Fast Approximate Anti-Aliasing post process which attemps to remove aliasing from an image.
  100. */
  101. public fxaaPostProcess: Nullable<FxaaPostProcess> = null;
        // Values
        /**
         * Represents the brightness threshold in order to configure the illuminated surfaces
         */
        @serialize()
        public brightThreshold: number = 1.0;
        /**
         * Configures the blur intensity used for overexposed / highlighted surfaces (light halo)
         */
        @serialize()
        public blurWidth: number = 512.0;
        /**
         * Sets if the blur for highlighted surfaces must be only horizontal
         */
        @serialize()
        public horizontalBlur: boolean = false;
        /**
         * Sets the overall exposure used by the pipeline
         */
        @serialize()
        public exposure: number = 1.0;
        /**
         * Texture used typically to simulate "dirty" on camera lens
         */
        @serializeAsTexture("lensTexture")
        public lensTexture: Nullable<Texture> = null;
        /**
         * Represents the offset coefficient based on Rayleigh principle. Typically in interval [-0.2, 0.2]
         */
        @serialize()
        public volumetricLightCoefficient: number = 0.2;
        /**
         * The overall power of volumetric lights, typically in interval [0, 10] maximum
         */
        @serialize()
        public volumetricLightPower: number = 4.0;
        /**
         * Used to set the blur intensity to smooth the volumetric lights
         */
        @serialize()
        public volumetricLightBlurScale: number = 64.0;
        /**
         * Light (spot or directional) used to generate the volumetric lights rays
         * The source light must have a shadow generator so the pipeline can get its
         * depth map
         */
        public sourceLight: Nullable<SpotLight | DirectionalLight> = null;
        /**
         * For eye adaptation, represents the minimum luminance the eye can see
         */
        @serialize()
        public hdrMinimumLuminance: number = 1.0;
        /**
         * For eye adaptation, represents the decrease luminance speed
         */
        @serialize()
        public hdrDecreaseRate: number = 0.5;
        /**
         * For eye adaptation, represents the increase luminance speed
         */
        @serialize()
        public hdrIncreaseRate: number = 0.5;
        /**
         * Lens color texture used by the lens flare effect. Mandatory if lens flare effect enabled
         */
        @serializeAsTexture("lensColorTexture")
        public lensColorTexture: Nullable<Texture> = null;
        /**
         * The overall strength for the lens flare effect
         */
        @serialize()
        public lensFlareStrength: number = 20.0;
        /**
         * Dispersion coefficient for lens flare ghosts
         */
        @serialize()
        public lensFlareGhostDispersal: number = 1.4;
        /**
         * Main lens flare halo width
         */
        @serialize()
        public lensFlareHaloWidth: number = 0.7;
        /**
         * Based on the lens distortion effect, defines how much the lens flare result
         * is distorted
         */
        @serialize()
        public lensFlareDistortionStrength: number = 16.0;
        /**
         * Lens star texture must be used to simulate rays on the flares and is available
         * in the documentation
         */
        @serializeAsTexture("lensStarTexture")
        public lensStarTexture: Nullable<Texture> = null;
        /**
         * As the "lensTexture" (can be the same texture or different), it is used to apply the lens
         * flare effect by taking account of the dirt texture
         */
        @serializeAsTexture("lensFlareDirtTexture")
        public lensFlareDirtTexture: Nullable<Texture> = null;
        /**
         * Represents the focal length for the depth of field effect
         */
        @serialize()
        public depthOfFieldDistance: number = 10.0;
        /**
         * Represents the blur intensity for the blurred part of the depth of field effect
         */
        @serialize()
        public depthOfFieldBlurWidth: number = 64.0;
        /**
         * For motion blur, defines how much the image is blurred by the movement
         */
        @serialize()
        public motionStrength: number = 1.0;
        /**
         * List of animations for the pipeline (IAnimatable implementation)
         */
        public animations: Animation[] = [];
        /**
         * Private members
         */
        private _scene: Scene;
        // Post-process whose output currently feeds the depth-of-field pass; reassigned
        // as bloom / VLS / lens-flare / HDR stages are appended to the chain.
        private _currentDepthOfFieldSource: Nullable<PostProcess> = null;
        // Custom original color post-process handed in by the constructor (may be null).
        private _basePostProcess: Nullable<PostProcess>;
        // Average luminance of the last frame, read back by the luminance down-sample chain.
        private _hdrCurrentLuminance: number = 1.0;
        // TEXTURETYPE_FLOAT when the engine supports float render targets, otherwise half float.
        private _floatTextureType: number;
        @serialize()
        private _ratio: number;
        // Getters and setters
        private _bloomEnabled: boolean = false;
        private _depthOfFieldEnabled: boolean = false;
        private _vlsEnabled: boolean = false;
        private _lensFlareEnabled: boolean = false;
        private _hdrEnabled: boolean = false;
        private _motionBlurEnabled: boolean = false;
        private _fxaaEnabled: boolean = false;
        private _motionBlurSamples: number = 64.0;
        private _volumetricLightStepsCount: number = 50.0;
        private _samples: number = 1;
  242. /**
  243. * Specifies if the bloom pipeline is enabled
  244. */
  245. @serialize()
  246. public get BloomEnabled(): boolean {
  247. return this._bloomEnabled;
  248. }
  249. public set BloomEnabled(enabled: boolean) {
  250. if (this._bloomEnabled === enabled) {
  251. return;
  252. }
  253. this._bloomEnabled = enabled;
  254. this._buildPipeline();
  255. }
  256. /**
  257. * Specifies if the depth of field pipeline is enabed
  258. */
  259. @serialize()
  260. public get DepthOfFieldEnabled(): boolean {
  261. return this._depthOfFieldEnabled;
  262. }
  263. public set DepthOfFieldEnabled(enabled: boolean) {
  264. if (this._depthOfFieldEnabled === enabled) {
  265. return;
  266. }
  267. this._depthOfFieldEnabled = enabled;
  268. this._buildPipeline();
  269. }
  270. /**
  271. * Specifies if the lens flare pipeline is enabed
  272. */
  273. @serialize()
  274. public get LensFlareEnabled(): boolean {
  275. return this._lensFlareEnabled;
  276. }
  277. public set LensFlareEnabled(enabled: boolean) {
  278. if (this._lensFlareEnabled === enabled) {
  279. return;
  280. }
  281. this._lensFlareEnabled = enabled;
  282. this._buildPipeline();
  283. }
  284. /**
  285. * Specifies if the HDR pipeline is enabled
  286. */
  287. @serialize()
  288. public get HDREnabled(): boolean {
  289. return this._hdrEnabled;
  290. }
  291. public set HDREnabled(enabled: boolean) {
  292. if (this._hdrEnabled === enabled) {
  293. return;
  294. }
  295. this._hdrEnabled = enabled;
  296. this._buildPipeline();
  297. }
  298. /**
  299. * Specifies if the volumetric lights scattering effect is enabled
  300. */
  301. @serialize()
  302. public get VLSEnabled(): boolean {
  303. return this._vlsEnabled;
  304. }
  305. public set VLSEnabled(enabled) {
  306. if (this._vlsEnabled === enabled) {
  307. return;
  308. }
  309. if (enabled) {
  310. var geometry = this._scene.enableGeometryBufferRenderer();
  311. if (!geometry) {
  312. Tools.Warn("Geometry renderer is not supported, cannot create volumetric lights in Standard Rendering Pipeline");
  313. return;
  314. }
  315. }
  316. this._vlsEnabled = enabled;
  317. this._buildPipeline();
  318. }
  319. /**
  320. * Specifies if the motion blur effect is enabled
  321. */
  322. @serialize()
  323. public get MotionBlurEnabled(): boolean {
  324. return this._motionBlurEnabled;
  325. }
  326. public set MotionBlurEnabled(enabled: boolean) {
  327. if (this._motionBlurEnabled === enabled) {
  328. return;
  329. }
  330. this._motionBlurEnabled = enabled;
  331. this._buildPipeline();
  332. }
  333. /**
  334. * Specifies if anti-aliasing is enabled
  335. */
  336. @serialize()
  337. public get fxaaEnabled(): boolean {
  338. return this._fxaaEnabled;
  339. }
  340. public set fxaaEnabled(enabled: boolean) {
  341. if (this._fxaaEnabled === enabled) {
  342. return;
  343. }
  344. this._fxaaEnabled = enabled;
  345. this._buildPipeline();
  346. }
  347. /**
  348. * Specifies the number of steps used to calculate the volumetric lights
  349. * Typically in interval [50, 200]
  350. */
  351. @serialize()
  352. public get volumetricLightStepsCount(): number {
  353. return this._volumetricLightStepsCount;
  354. }
  355. public set volumetricLightStepsCount(count: number)  {
  356. if (this.volumetricLightPostProcess) {
  357. this.volumetricLightPostProcess.updateEffect("#define VLS\n#define NB_STEPS " + count.toFixed(1));
  358. }
  359. this._volumetricLightStepsCount = count;
  360. }
  361. /**
  362. * Specifies the number of samples used for the motion blur effect
  363. * Typically in interval [16, 64]
  364. */
  365. @serialize()
  366. public get motionBlurSamples(): number {
  367. return this._motionBlurSamples;
  368. }
  369. public set motionBlurSamples(samples: number) {
  370. if (this.motionBlurPostProcess) {
  371. this.motionBlurPostProcess.updateEffect("#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + samples.toFixed(1));
  372. }
  373. this._motionBlurSamples = samples;
  374. }
  375. /**
  376. * Specifies MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
  377. */
  378. @serialize()
  379. public get samples(): number {
  380. return this._samples;
  381. }
  382. public set samples(sampleCount: number) {
  383. if (this._samples === sampleCount) {
  384. return;
  385. }
  386. this._samples = sampleCount;
  387. this._buildPipeline();
  388. }
        /**
         * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
         * @constructor
         * @param {string} name - The rendering pipeline name
         * @param {BABYLON.Scene} scene - The scene linked to this pipeline
         * @param {any} ratio - The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5)
         * @param {BABYLON.PostProcess} originalPostProcess - the custom original color post-process. Must be "reusable". Can be null.
         * @param {BABYLON.Camera[]} cameras - The array of cameras that the rendering pipeline will be attached to
         */
        constructor(name: string, scene: Scene, ratio: number, originalPostProcess: Nullable<PostProcess> = null, cameras?: Camera[]) {
            super(scene.getEngine(), name);
            this._cameras = cameras || [];
            // Initialize
            this._scene = scene;
            this._basePostProcess = originalPostProcess;
            this._ratio = ratio;
            // Misc: prefer full-float render targets when the engine supports them, otherwise half float.
            this._floatTextureType = scene.getEngine().getCaps().textureFloatRender ? Engine.TEXTURETYPE_FLOAT : Engine.TEXTURETYPE_HALF_FLOAT;
            // Finish: register with the scene's pipeline manager, then build the full post-process chain.
            scene.postProcessRenderPipelineManager.addPipeline(this);
            this._buildPipeline();
        }
        /**
         * Tears down and rebuilds the whole post-process chain according to the
         * enabled-effect flags. Effects are appended in a fixed order:
         * pass -> bloom -> VLS -> lens flare -> HDR -> depth of field -> motion blur -> FXAA.
         * NOTE: statement order matters — later stages read the "..FinalPostProcess"
         * outputs created by earlier stages.
         */
        private _buildPipeline(): void {
            var ratio = this._ratio;
            var scene = this._scene;
            // Drop any previously created post-processes before rebuilding.
            this._disposePostProcesses();
            this._reset();
            // Create pass post-process (either the caller-supplied one or a plain pass)
            if (!this._basePostProcess) {
                this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType);
                this.originalPostProcess.onApply = (effect: Effect) => {
                    // Reset the depth-of-field source at the start of each frame.
                    this._currentDepthOfFieldSource = this.originalPostProcess;
                };
            }
            else {
                this.originalPostProcess = this._basePostProcess;
            }
            // The pass effect is only needed when at least one downstream stage consumes it.
            if (this._bloomEnabled || this._vlsEnabled || this._lensFlareEnabled || this._depthOfFieldEnabled || this._motionBlurEnabled) {
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => { return this.originalPostProcess; }, true));
            }
            this._currentDepthOfFieldSource = this.originalPostProcess;
            if (this._bloomEnabled) {
                // Create down sample X4 post-process
                this._createDownSampleX4PostProcess(scene, ratio / 2);
                // Create bright pass post-process
                this._createBrightPassPostProcess(scene, ratio / 2);
                // Create gaussian blur post-processes (down sampling blurs)
                this._createBlurPostProcesses(scene, ratio / 4, 1);
                // Create texture adder post-process
                this._createTextureAdderPostProcess(scene, ratio);
                // Create depth-of-field source post-process
                this.textureAdderFinalPostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBaseDepthOfFieldSource", () => { return this.textureAdderFinalPostProcess; }, true));
            }
            if (this._vlsEnabled) {
                // Create volumetric light
                this._createVolumetricLightPostProcess(scene, ratio);
                // Create volumetric light final post-process
                this.volumetricLightFinalPostProcess = new PostProcess("HDRVLSFinal", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSFinal", () => { return this.volumetricLightFinalPostProcess; }, true));
            }
            if (this._lensFlareEnabled) {
                // Create lens flare post-process
                this._createLensFlarePostProcess(scene, ratio);
                // Create depth-of-field source post-process post lens-flare and disable it now
                this.lensFlareFinalPostProcess = new PostProcess("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => { return this.lensFlareFinalPostProcess; }, true));
            }
            if (this._hdrEnabled) {
                // Create luminance (full float where supported — see _floatTextureType)
                this._createLuminancePostProcesses(scene, this._floatTextureType);
                // Create HDR
                this._createHdrPostProcess(scene, ratio);
                // Create depth-of-field source post-process post hdr and disable it now
                this.hdrFinalPostProcess = new PostProcess("HDRPostHDReDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostHDReDepthOfFieldSource", () => { return this.hdrFinalPostProcess; }, true));
            }
            if (this._depthOfFieldEnabled) {
                // Create gaussian blur used by depth-of-field
                this._createBlurPostProcesses(scene, ratio / 2, 3, "depthOfFieldBlurWidth");
                // Create depth-of-field post-process
                this._createDepthOfFieldPostProcess(scene, ratio);
            }
            if (this._motionBlurEnabled) {
                // Create motion blur post-process
                this._createMotionBlurPostProcess(scene, ratio);
            }
            if (this._fxaaEnabled) {
                // Create fxaa post-process (last image-quality stage in the chain)
                this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true));
            }
            // Re-attach cameras now that the chain has been rebuilt.
            if (this._cameras !== null) {
                this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
            }
            if (!this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1){
                BABYLON.Tools.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
            }
        }
  488. // Down Sample X4 Post-Processs
  489. private _createDownSampleX4PostProcess(scene: Scene, ratio: number): void {
  490. var downSampleX4Offsets = new Array<number>(32);
  491. this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", Engine.TEXTURETYPE_UNSIGNED_INT);
  492. this.downSampleX4PostProcess.onApply = (effect: Effect) => {
  493. var id = 0;
  494. let width = (<PostProcess>this.downSampleX4PostProcess).width;
  495. let height = (<PostProcess>this.downSampleX4PostProcess).height;
  496. for (var i = -2; i < 2; i++) {
  497. for (var j = -2; j < 2; j++) {
  498. downSampleX4Offsets[id] = (i + 0.5) * (1.0 / width);
  499. downSampleX4Offsets[id + 1] = (j + 0.5) * (1.0 / height);
  500. id += 2;
  501. }
  502. }
  503. effect.setArray2("dsOffsets", downSampleX4Offsets);
  504. };
  505. // Add to pipeline
  506. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDownSampleX4", () => { return this.downSampleX4PostProcess; }, true));
  507. }
  508. // Brightpass Post-Process
  509. private _createBrightPassPostProcess(scene: Scene, ratio: number): void {
  510. var brightOffsets = new Array<number>(8);
  511. this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", Engine.TEXTURETYPE_UNSIGNED_INT);
  512. this.brightPassPostProcess.onApply = (effect: Effect) => {
  513. var sU = (1.0 / (<PostProcess>this.brightPassPostProcess).width);
  514. var sV = (1.0 / (<PostProcess>this.brightPassPostProcess).height);
  515. brightOffsets[0] = -0.5 * sU;
  516. brightOffsets[1] = 0.5 * sV;
  517. brightOffsets[2] = 0.5 * sU;
  518. brightOffsets[3] = 0.5 * sV;
  519. brightOffsets[4] = -0.5 * sU;
  520. brightOffsets[5] = -0.5 * sV;
  521. brightOffsets[6] = 0.5 * sU;
  522. brightOffsets[7] = -0.5 * sV;
  523. effect.setArray2("dsOffsets", brightOffsets);
  524. effect.setFloat("brightThreshold", this.brightThreshold);
  525. }
  526. // Add to pipeline
  527. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBrightPass", () => { return this.brightPassPostProcess; }, true));
  528. }
  529. // Create blur H&V post-processes
  530. private _createBlurPostProcesses(scene: Scene, ratio: number, indice: number, blurWidthKey: string = "blurWidth"): void {
  531. var engine = scene.getEngine();
  532. var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
  533. var blurY = new BlurPostProcess("HDRBlurV" + "_" + indice, new Vector2(0, 1), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
  534. blurX.onActivateObservable.add(() => {
  535. let dw = blurX.width / engine.getRenderWidth();
  536. blurX.kernel = (<any>this)[blurWidthKey] * dw;
  537. });
  538. blurY.onActivateObservable.add(() => {
  539. let dw = blurY.height / engine.getRenderHeight();
  540. blurY.kernel = this.horizontalBlur ? 64 * dw : (<any>this)[blurWidthKey] * dw;
  541. });
  542. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurH" + indice, () => { return blurX; }, true));
  543. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurV" + indice, () => { return blurY; }, true));
  544. this.blurHPostProcesses.push(blurX);
  545. this.blurVPostProcesses.push(blurY);
  546. }
  547. // Create texture adder post-process
  548. private _createTextureAdderPostProcess(scene: Scene, ratio: number): void {
  549. this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", Engine.TEXTURETYPE_UNSIGNED_INT);
  550. this.textureAdderPostProcess.onApply = (effect: Effect) => {
  551. effect.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? this._currentDepthOfFieldSource : this.originalPostProcess);
  552. effect.setTexture("lensSampler", this.lensTexture);
  553. effect.setFloat("exposure", this.exposure);
  554. this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess;
  555. };
  556. // Add to pipeline
  557. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true));
  558. }
        /**
         * Creates the volumetric light scattering chain: the VLS ray-march pass
         * (at 1/8 resolution), a smoothing blur, and a merge pass that composites
         * the result over the scene color. Requires the geometry buffer renderer
         * (position texture) and a sourceLight with a shadow generator.
         */
        private _createVolumetricLightPostProcess(scene: Scene, ratio: number): void {
            var geometryRenderer = <GeometryBufferRenderer>scene.enableGeometryBufferRenderer();
            geometryRenderer.enablePosition = true;
            var geometry = geometryRenderer.getGBuffer();
            // Base post-process: step count is baked into the defines (see volumetricLightStepsCount setter)
            this.volumetricLightPostProcess = new PostProcess("HDRVLS", "standard",
                ["shadowViewProjection", "cameraPosition", "sunDirection", "sunColor", "scatteringCoefficient", "scatteringPower", "depthValues"],
                ["shadowMapSampler", "positionSampler"],
                ratio / 8,
                null,
                Texture.BILINEAR_SAMPLINGMODE,
                scene.getEngine(),
                false, "#define VLS\n#define NB_STEPS " + this._volumetricLightStepsCount.toFixed(1));
            // Reused across applies to avoid per-frame allocation.
            var depthValues = Vector2.Zero();
            this.volumetricLightPostProcess.onApply = (effect: Effect) => {
                // Uniforms are only meaningful when a shadow-casting source light and
                // an active camera exist; otherwise the pass is a no-op this frame.
                if (this.sourceLight && this.sourceLight.getShadowGenerator() && this._scene.activeCamera) {
                    var generator = this.sourceLight.getShadowGenerator()!;
                    effect.setTexture("shadowMapSampler", generator.getShadowMap());
                    // textures[2] is the position texture enabled above via enablePosition.
                    effect.setTexture("positionSampler", geometry.textures[2]);
                    effect.setColor3("sunColor", this.sourceLight.diffuse);
                    effect.setVector3("sunDirection", this.sourceLight.getShadowDirection());
                    effect.setVector3("cameraPosition", this._scene.activeCamera.globalPosition);
                    effect.setMatrix("shadowViewProjection", generator.getTransformMatrix());
                    effect.setFloat("scatteringCoefficient", this.volumetricLightCoefficient);
                    effect.setFloat("scatteringPower", this.volumetricLightPower);
                    depthValues.x = this.sourceLight.getDepthMinZ(this._scene.activeCamera);
                    depthValues.y = this.sourceLight.getDepthMaxZ(this._scene.activeCamera);
                    effect.setVector2("depthValues", depthValues);
                }
            };
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLS", () => { return this.volumetricLightPostProcess; }, true));
            // Smooth
            this._createBlurPostProcesses(scene, ratio / 4, 0, "volumetricLightBlurScale");
            // Merge: composite the smoothed VLS result over the scene (or bloom output).
            this.volumetricLightMergePostProces = new PostProcess("HDRVLSMerge", "standard", [], ["originalSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define VLSMERGE");
            this.volumetricLightMergePostProces.onApply = (effect: Effect) => {
                effect.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? this.textureAdderFinalPostProcess : this.originalPostProcess);
                // Downstream depth-of-field now reads from the VLS final output.
                this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess;
            };
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSMerge", () => { return this.volumetricLightMergePostProces; }, true));
        }
        // Creates the luminance measurement chain: one pass rendering scene
        // luminance at 3^LuminanceSteps pixels, followed by successive 3x
        // down-sample passes ending at 1x1, whose pixel is read back on the CPU
        // into _hdrCurrentLuminance for the HDR tone-mapping pass.
        private _createLuminancePostProcesses(scene: Scene, textureType: number): void {
            // Create luminance: initial size is a power of 3 so every
            // down-sample step divides it exactly.
            var size = Math.pow(3, StandardRenderingPipeline.LuminanceSteps);
            this.luminancePostProcess = new PostProcess("HDRLuminance", "standard", ["lumOffsets"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LUMINANCE", textureType);

            var offsets: number[] = [];
            this.luminancePostProcess.onApply = (effect: Effect) => {
                // Half-texel offsets for the 4 bilinear taps used by the shader.
                var sU = (1.0 / (<PostProcess>this.luminancePostProcess).width);
                var sV = (1.0 / (<PostProcess>this.luminancePostProcess).height);

                offsets[0] = -0.5 * sU;
                offsets[1] = 0.5 * sV;
                offsets[2] = 0.5 * sU;
                offsets[3] = 0.5 * sV;
                offsets[4] = -0.5 * sU;
                offsets[5] = -0.5 * sV;
                offsets[6] = 0.5 * sU;
                offsets[7] = -0.5 * sV;

                effect.setArray2("lumOffsets", offsets);
            };

            // Add to pipeline
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminance", () => { return this.luminancePostProcess; }, true));

            // Create down sample luminance passes: sizes 3^(Steps-1) ... 3^0 (1x1).
            for (var i = StandardRenderingPipeline.LuminanceSteps - 1; i >= 0; i--) {
                var size = Math.pow(3, i);

                var defines = "#define LUMINANCE_DOWN_SAMPLE\n";
                if (i === 0) {
                    defines += "#define FINAL_DOWN_SAMPLER";
                }

                var postProcess = new PostProcess("HDRLuminanceDownSample" + i, "standard", ["dsOffsets", "halfDestPixelSize"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines, textureType);
                this.luminanceDownSamplePostProcesses.push(postProcess);
            }

            // Create callbacks and add effects.
            // NOTE: lastLuminance is shared by all onApply closures below and is
            // mutated at render time — each pass samples its predecessor's size,
            // and the last pass resets the chain back to luminancePostProcess so
            // the next frame starts over. Keep this ordering intact.
            var lastLuminance: Nullable<PostProcess> = this.luminancePostProcess;
            this.luminanceDownSamplePostProcesses.forEach((pp, index) => {
                // 9 taps (3x3 grid) => 18 floats.
                var downSampleOffsets = new Array<number>(18);

                pp.onApply = (effect: Effect) => {
                    if (!lastLuminance) {
                        return;
                    }

                    var id = 0;
                    for (var x = -1; x < 2; x++) {
                        for (var y = -1; y < 2; y++) {
                            downSampleOffsets[id] = x / lastLuminance.width;
                            downSampleOffsets[id + 1] = y / lastLuminance.height;
                            id += 2;
                        }
                    }

                    effect.setArray2("dsOffsets", downSampleOffsets);
                    effect.setFloat("halfDestPixelSize", 0.5 / lastLuminance.width);

                    if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                        lastLuminance = this.luminancePostProcess;
                    } else {
                        lastLuminance = pp;
                    }
                };

                if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                    pp.onAfterRender = (effect: Effect) => {
                        // Read the single remaining pixel and decode the packed
                        // luminance (RGBA byte unpacking via bit_shift weights).
                        var pixel = scene.getEngine().readPixels(0, 0, 1, 1);
                        var bit_shift = new Vector4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);
                        // NOTE(review): the /100.0 scale presumably matches the
                        // shader's packing scale — confirm against the "standard"
                        // fragment shader.
                        this._hdrCurrentLuminance = (pixel[0] * bit_shift.x + pixel[1] * bit_shift.y + pixel[2] * bit_shift.z + pixel[3] * bit_shift.w) / 100.0;
                    };
                }

                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminanceDownSample" + index, () => { return pp; }, true));
            });
        }
  665. // Create HDR post-process
  666. private _createHdrPostProcess(scene: Scene, ratio: number): void {
  667. this.hdrPostProcess = new PostProcess("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define HDR", Engine.TEXTURETYPE_UNSIGNED_INT);
  668. var outputLiminance = 1;
  669. var time = 0;
  670. var lastTime = 0;
  671. this.hdrPostProcess.onApply = (effect: Effect) => {
  672. effect.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource);
  673. time += scene.getEngine().getDeltaTime();
  674. if (outputLiminance < 0) {
  675. outputLiminance = this._hdrCurrentLuminance;
  676. } else {
  677. var dt = (lastTime - time) / 1000.0;
  678. if (this._hdrCurrentLuminance < outputLiminance + this.hdrDecreaseRate * dt) {
  679. outputLiminance += this.hdrDecreaseRate * dt;
  680. }
  681. else if (this._hdrCurrentLuminance > outputLiminance - this.hdrIncreaseRate * dt) {
  682. outputLiminance -= this.hdrIncreaseRate * dt;
  683. }
  684. else {
  685. outputLiminance = this._hdrCurrentLuminance;
  686. }
  687. }
  688. outputLiminance = Scalar.Clamp(outputLiminance, this.hdrMinimumLuminance, 1e20);
  689. effect.setFloat("averageLuminance", outputLiminance);
  690. lastTime = time;
  691. this._currentDepthOfFieldSource = this.hdrFinalPostProcess;
  692. };
  693. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDR", () => { return this.hdrPostProcess; }, true));
  694. }
  695. // Create lens flare post-process
  696. private _createLensFlarePostProcess(scene: Scene, ratio: number): void {
  697. this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", Engine.TEXTURETYPE_UNSIGNED_INT);
  698. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlare", () => { return this.lensFlarePostProcess; }, true));
  699. this._createBlurPostProcesses(scene, ratio / 4, 2);
  700. this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", Engine.TEXTURETYPE_UNSIGNED_INT);
  701. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlareCompose", () => { return this.lensFlareComposePostProcess; }, true));
  702. var resolution = new Vector2(0, 0);
  703. // Lens flare
  704. this.lensFlarePostProcess.onApply = (effect: Effect) => {
  705. effect.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess);
  706. effect.setTexture("lensColorSampler", this.lensColorTexture);
  707. effect.setFloat("strength", this.lensFlareStrength);
  708. effect.setFloat("ghostDispersal", this.lensFlareGhostDispersal);
  709. effect.setFloat("haloWidth", this.lensFlareHaloWidth);
  710. // Shift
  711. resolution.x = (<PostProcess>this.lensFlarePostProcess).width;
  712. resolution.y = (<PostProcess>this.lensFlarePostProcess).height;
  713. effect.setVector2("resolution", resolution);
  714. effect.setFloat("distortionStrength", this.lensFlareDistortionStrength);
  715. };
  716. // Compose
  717. var scaleBias1 = Matrix.FromValues(
  718. 2.0, 0.0, -1.0, 0.0,
  719. 0.0, 2.0, -1.0, 0.0,
  720. 0.0, 0.0, 1.0, 0.0,
  721. 0.0, 0.0, 0.0, 1.0
  722. );
  723. var scaleBias2 = Matrix.FromValues(
  724. 0.5, 0.0, 0.5, 0.0,
  725. 0.0, 0.5, 0.5, 0.0,
  726. 0.0, 0.0, 1.0, 0.0,
  727. 0.0, 0.0, 0.0, 1.0
  728. );
  729. this.lensFlareComposePostProcess.onApply = (effect: Effect) => {
  730. if (!this._scene.activeCamera) {
  731. return;
  732. }
  733. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  734. effect.setTexture("lensDirtSampler", this.lensFlareDirtTexture);
  735. effect.setTexture("lensStarSampler", this.lensStarTexture);
  736. // Lens start rotation matrix
  737. var camerax = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(0));
  738. var cameraz = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(2));
  739. var camRot = Vector3.Dot(camerax.toVector3(), new Vector3(1.0, 0.0, 0.0)) + Vector3.Dot(cameraz.toVector3(), new Vector3(0.0, 0.0, 1.0));
  740. camRot *= 4.0;
  741. var starRotation = Matrix.FromValues(
  742. Math.cos(camRot) * 0.5, -Math.sin(camRot), 0.0, 0.0,
  743. Math.sin(camRot), Math.cos(camRot) * 0.5, 0.0, 0.0,
  744. 0.0, 0.0, 1.0, 0.0,
  745. 0.0, 0.0, 0.0, 1.0
  746. );
  747. var lensStarMatrix = scaleBias2.multiply(starRotation).multiply(scaleBias1);
  748. effect.setMatrix("lensStarMatrix", lensStarMatrix);
  749. this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess;
  750. };
  751. }
  752. // Create depth-of-field post-process
  753. private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void {
  754. this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Engine.TEXTURETYPE_UNSIGNED_INT);
  755. this.depthOfFieldPostProcess.onApply = (effect: Effect) => {
  756. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  757. effect.setTexture("depthSampler", this._getDepthTexture());
  758. effect.setFloat("distance", this.depthOfFieldDistance);
  759. };
  760. // Add to pipeline
  761. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfField", () => { return this.depthOfFieldPostProcess; }, true));
  762. }
  763. // Create motion blur post-process
  764. private _createMotionBlurPostProcess(scene: Scene, ratio: number): void {
  765. this.motionBlurPostProcess = new PostProcess("HDRMotionBlur", "standard",
  766. ["inverseViewProjection", "prevViewProjection", "screenSize", "motionScale", "motionStrength"],
  767. ["depthSampler"],
  768. ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + this.motionBlurSamples.toFixed(1), Engine.TEXTURETYPE_UNSIGNED_INT);
  769. var motionScale: number = 0;
  770. var prevViewProjection = Matrix.Identity();
  771. var invViewProjection = Matrix.Identity();
  772. var viewProjection = Matrix.Identity();
  773. var screenSize = Vector2.Zero();
  774. this.motionBlurPostProcess.onApply = (effect: Effect) => {
  775. viewProjection = scene.getProjectionMatrix().multiply(scene.getViewMatrix());
  776. viewProjection.invertToRef(invViewProjection);
  777. effect.setMatrix("inverseViewProjection", invViewProjection);
  778. effect.setMatrix("prevViewProjection", prevViewProjection);
  779. prevViewProjection = viewProjection;
  780. screenSize.x = (<PostProcess>this.motionBlurPostProcess).width;
  781. screenSize.y = (<PostProcess>this.motionBlurPostProcess).height;
  782. effect.setVector2("screenSize", screenSize);
  783. motionScale = scene.getEngine().getFps() / 60.0;
  784. effect.setFloat("motionScale", motionScale);
  785. effect.setFloat("motionStrength", this.motionStrength);
  786. effect.setTexture("depthSampler", this._getDepthTexture());
  787. };
  788. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRMotionBlur", () => { return this.motionBlurPostProcess; }, true));
  789. }
  790. private _getDepthTexture(): Texture {
  791. if (this._scene.getEngine().getCaps().drawBuffersExtension) {
  792. let renderer = <GeometryBufferRenderer>this._scene.enableGeometryBufferRenderer();
  793. return renderer.getGBuffer().textures[0];
  794. }
  795. return this._scene.enableDepthRenderer().getDepthMap();
  796. }
  797. private _disposePostProcesses(): void {
  798. for (var i = 0; i < this._cameras.length; i++) {
  799. var camera = this._cameras[i];
  800. if (this.originalPostProcess) { this.originalPostProcess.dispose(camera); }
  801. if (this.downSampleX4PostProcess) { this.downSampleX4PostProcess.dispose(camera); }
  802. if (this.brightPassPostProcess) { this.brightPassPostProcess.dispose(camera); }
  803. if (this.textureAdderPostProcess) { this.textureAdderPostProcess.dispose(camera); }
  804. if (this.textureAdderFinalPostProcess) { this.textureAdderFinalPostProcess.dispose(camera); }
  805. if (this.volumetricLightPostProcess) { this.volumetricLightPostProcess.dispose(camera); }
  806. if (this.volumetricLightSmoothXPostProcess) { this.volumetricLightSmoothXPostProcess.dispose(camera); }
  807. if (this.volumetricLightSmoothYPostProcess) { this.volumetricLightSmoothYPostProcess.dispose(camera); }
  808. if (this.volumetricLightMergePostProces) { this.volumetricLightMergePostProces.dispose(camera); }
  809. if (this.volumetricLightFinalPostProcess) { this.volumetricLightFinalPostProcess.dispose(camera); }
  810. if (this.lensFlarePostProcess) { this.lensFlarePostProcess.dispose(camera); }
  811. if (this.lensFlareComposePostProcess) { this.lensFlareComposePostProcess.dispose(camera); }
  812. for (var j = 0; j < this.luminanceDownSamplePostProcesses.length; j++) {
  813. this.luminanceDownSamplePostProcesses[j].dispose(camera);
  814. }
  815. if (this.luminancePostProcess) { this.luminancePostProcess.dispose(camera); }
  816. if (this.hdrPostProcess) { this.hdrPostProcess.dispose(camera); }
  817. if (this.hdrFinalPostProcess) { this.hdrFinalPostProcess.dispose(camera); }
  818. if (this.depthOfFieldPostProcess) { this.depthOfFieldPostProcess.dispose(camera); }
  819. if (this.motionBlurPostProcess) { this.motionBlurPostProcess.dispose(camera); }
  820. if (this.fxaaPostProcess) { this.fxaaPostProcess.dispose(camera); }
  821. for (var j = 0; j < this.blurHPostProcesses.length; j++) {
  822. this.blurHPostProcesses[j].dispose(camera);
  823. }
  824. for (var j = 0; j < this.blurVPostProcesses.length; j++) {
  825. this.blurVPostProcesses[j].dispose(camera);
  826. }
  827. }
  828. this.originalPostProcess = null;
  829. this.downSampleX4PostProcess = null;
  830. this.brightPassPostProcess = null;
  831. this.textureAdderPostProcess = null;
  832. this.textureAdderFinalPostProcess = null;
  833. this.volumetricLightPostProcess = null;
  834. this.volumetricLightSmoothXPostProcess = null;
  835. this.volumetricLightSmoothYPostProcess = null;
  836. this.volumetricLightMergePostProces = null;
  837. this.volumetricLightFinalPostProcess = null;
  838. this.lensFlarePostProcess = null;
  839. this.lensFlareComposePostProcess = null;
  840. this.luminancePostProcess = null;
  841. this.hdrPostProcess = null;
  842. this.hdrFinalPostProcess = null;
  843. this.depthOfFieldPostProcess = null;
  844. this.motionBlurPostProcess = null;
  845. this.fxaaPostProcess = null;
  846. this.luminanceDownSamplePostProcesses = [];
  847. this.blurHPostProcesses = [];
  848. this.blurVPostProcesses = [];
  849. }
        /**
         * Dispose of the pipeline and stop all post processes.
         * Releases every post-process on every attached camera, detaches the
         * cameras from this pipeline in the scene's pipeline manager, and then
         * runs the base-class cleanup.
         */
        public dispose(): void {
            this._disposePostProcesses();
            this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
            super.dispose();
        }
  858. /**
  859. * Serialize the rendering pipeline (Used when exporting)
  860. * @returns the serialized object
  861. */
  862. public serialize(): any {
  863. var serializationObject = SerializationHelper.Serialize(this);
  864. if (this.sourceLight) {
  865. serializationObject.sourceLightId = this.sourceLight.id;
  866. }
  867. serializationObject.customType = "StandardRenderingPipeline";
  868. return serializationObject;
  869. }
  870. /**
  871. * Parse the serialized pipeline
  872. * @param source Source pipeline.
  873. * @param scene The scene to load the pipeline to.
  874. * @param rootUrl The URL of the serialized pipeline.
  875. * @returns An instantiated pipeline from the serialized object.
  876. */
  877. public static Parse(source: any, scene: Scene, rootUrl: string): StandardRenderingPipeline {
  878. var p = SerializationHelper.Parse(() => new StandardRenderingPipeline(source._name, scene, source._ratio), source, scene, rootUrl);
  879. if (source.sourceLightId) {
  880. p.sourceLight = <SpotLight | DirectionalLight> scene.getLightByID(source.sourceLightId);
  881. }
  882. return p;
  883. }
        // Luminance steps: number of 3x down-sample passes in the luminance
        // chain; the first luminance pass renders at 3^LuminanceSteps pixels.
        public static LuminanceSteps: number = 6;
  886. }
  887. }