defaultRenderingPipeline.ts

  1. import { Nullable } from "types";
  2. import { IAnimatable, Observer, serialize, SerializationHelper, Tools } from "Tools";
  3. import { Camera } from "Cameras";
  4. import { ImageProcessingConfiguration, Texture } from "Materials";
  5. import { PostProcess, PostProcessRenderPipeline, Scene, SharpenPostProcess, PostProcessRenderEffect, BloomEffect, DepthOfFieldEffect, FxaaPostProcess, ImageProcessingPostProcess, ChromaticAberrationPostProcess, GrainPostProcess, DepthOfFieldEffectBlurLevel } from "PostProcess";
  6. import { Engine } from "Engine";
  7. import { IDisposable } from "scene";
  8. import { GlowLayer } from "Layer";
  9. /**
  10. * The default rendering pipeline can be added to a scene to apply common post processing effects such as anti-aliasing or depth of field.
  11. * See https://doc.babylonjs.com/how_to/using_default_rendering_pipeline
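* @example
* // Illustrative sketch (not part of the original file): create the pipeline and toggle a few effects.
* const pipeline = new DefaultRenderingPipeline("default", true, scene, [camera]);
* pipeline.fxaaEnabled = true;
* pipeline.bloomEnabled = true;
* pipeline.bloomThreshold = 0.8;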
  12. */
  13. export class DefaultRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
  14. private _scene: Scene;
  15. private _camerasToBeAttached: Array<Camera> = [];
  16. /**
  17. * ID of the sharpen post process.
  18. */
  19. private readonly SharpenPostProcessId: string = "SharpenPostProcessEffect";
  20. /**
  21. * @ignore
  22. * ID of the image processing post process.
  23. */
  24. readonly ImageProcessingPostProcessId: string = "ImageProcessingPostProcessEffect";
  25. /**
  26. * @ignore
  27. * ID of the Fast Approximate Anti-Aliasing post process.
  28. */
  29. readonly FxaaPostProcessId: string = "FxaaPostProcessEffect";
  30. /**
  31. * ID of the chromatic aberration post process.
  32. */
  33. private readonly ChromaticAberrationPostProcessId: string = "ChromaticAberrationPostProcessEffect";
  34. /**
  35. * ID of the grain post process
  36. */
  37. private readonly GrainPostProcessId: string = "GrainPostProcessEffect";
  38. // Post-processes
  39. /**
  40. * Sharpen post process which will apply a sharpen convolution to enhance edges
  41. */
  42. public sharpen: SharpenPostProcess;
  43. private _sharpenEffect: PostProcessRenderEffect;
  44. private bloom: BloomEffect;
  45. /**
  46. * Depth of field effect which applies a blur based on how far away objects are from the focus distance.
  47. */
  48. public depthOfField: DepthOfFieldEffect;
  49. /**
  50. * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
  51. */
  52. public fxaa: FxaaPostProcess;
  53. /**
  54. * Image post processing pass used to perform operations such as tone mapping or color grading.
  55. */
  56. public imageProcessing: ImageProcessingPostProcess;
  57. /**
  58. * Chromatic aberration post process which will shift RGB colors in the image
  59. */
  60. public chromaticAberration: ChromaticAberrationPostProcess;
  61. private _chromaticAberrationEffect: PostProcessRenderEffect;
  62. /**
  63. * Grain post process which adds noise to the image
  64. */
  65. public grain: GrainPostProcess;
  66. private _grainEffect: PostProcessRenderEffect;
  67. /**
  68. * Glow post process which adds a glow to emissive areas of the image
  69. */
  70. private _glowLayer: Nullable<GlowLayer> = null;
  71. /**
  72. * Animations which can be used to tweak settings over a period of time
  73. */
  74. public animations: Animation[] = [];
  75. private _imageProcessingConfigurationObserver: Nullable<Observer<ImageProcessingConfiguration>> = null;
  76. // Values
  77. private _sharpenEnabled: boolean = false;
  78. private _bloomEnabled: boolean = false;
  79. private _depthOfFieldEnabled: boolean = false;
  80. private _depthOfFieldBlurLevel = DepthOfFieldEffectBlurLevel.Low;
  81. private _fxaaEnabled: boolean = false;
  82. private _imageProcessingEnabled: boolean = true;
  83. private _defaultPipelineTextureType: number;
  84. private _bloomScale: number = 0.5;
  85. private _chromaticAberrationEnabled: boolean = false;
  86. private _grainEnabled: boolean = false;
  87. private _buildAllowed = true;
  88. /**
  89. * Enable or disable the sharpen process from the pipeline
  90. */
  91. public set sharpenEnabled(enabled: boolean) {
  92. if (this._sharpenEnabled === enabled) {
  93. return;
  94. }
  95. this._sharpenEnabled = enabled;
  96. this._buildPipeline();
  97. }
  98. @serialize()
  99. public get sharpenEnabled(): boolean {
  100. return this._sharpenEnabled;
  101. }
  102. private _resizeObserver: Nullable<Observer<Engine>> = null;
  103. private _hardwareScaleLevel = 1.0;
  104. private _bloomKernel: number = 64;
  105. /**
  106. * Specifies the size of the bloom blur kernel, relative to the final output size
  107. */
  108. @serialize()
  109. public get bloomKernel(): number {
  110. return this._bloomKernel;
  111. }
  112. public set bloomKernel(value: number) {
  113. this._bloomKernel = value;
  114. this.bloom.kernel = value / this._hardwareScaleLevel;
  115. }
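// Illustrative note (not part of the original file): the kernel is expressed relative to the final
// output size, so the setter above divides by the hardware scaling level. For example, with
// engine.setHardwareScalingLevel(2) a request of
//   pipeline.bloomKernel = 64;
// results in an internal blur kernel of 32 on the half-resolution buffer, keeping the visible blur
// size consistent on screen.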
  116. /**
  117. * Specifies the weight of the bloom in the final rendering
  118. */
  119. @serialize()
  120. private _bloomWeight: number = 0.15;
  121. /**
  122. * Specifies the luma threshold for the area that will be blurred by the bloom
  123. */
  124. @serialize()
  125. private _bloomThreshold: number = 0.9;
  126. @serialize()
  127. private _hdr: boolean;
  128. /**
  129. * The strength of the bloom.
  130. */
  131. public set bloomWeight(value: number) {
  132. if (this._bloomWeight === value) {
  133. return;
  134. }
  135. this.bloom.weight = value;
  136. this._bloomWeight = value;
  137. }
  138. @serialize()
  139. public get bloomWeight(): number {
  140. return this._bloomWeight;
  141. }
  142. /**
  143. * The luma threshold for the areas of the image that will be blurred by the bloom.
  144. */
  145. public set bloomThreshold(value: number) {
  146. if (this._bloomThreshold === value) {
  147. return;
  148. }
  149. this.bloom.threshold = value;
  150. this._bloomThreshold = value;
  151. }
  152. @serialize()
  153. public get bloomThreshold(): number {
  154. return this._bloomThreshold;
  155. }
  156. /**
  157. * The scale of the bloom; a lower value will provide better performance.
  158. */
  159. public set bloomScale(value: number) {
  160. if (this._bloomScale === value) {
  161. return;
  162. }
  163. this._bloomScale = value;
  164. // recreate bloom and dispose old as this setting is not dynamic
  165. this._rebuildBloom();
  166. this._buildPipeline();
  167. }
  168. @serialize()
  169. public get bloomScale(): number {
  170. return this._bloomScale;
  171. }
  172. /**
  173. * Enable or disable the bloom from the pipeline
  174. */
  175. public set bloomEnabled(enabled: boolean) {
  176. if (this._bloomEnabled === enabled) {
  177. return;
  178. }
  179. this._bloomEnabled = enabled;
  180. this._buildPipeline();
  181. }
  182. @serialize()
  183. public get bloomEnabled(): boolean {
  184. return this._bloomEnabled;
  185. }
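// Example (illustrative, not part of the original file): typical bloom tuning with the properties above.
//   pipeline.bloomEnabled = true;
//   pipeline.bloomThreshold = 0.8; // only areas brighter than this luma are bloomed
//   pipeline.bloomWeight = 0.3;    // strength of the bloom in the final image
//   pipeline.bloomScale = 0.5;     // lower values render the bloom at a smaller scale (faster)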
  186. private _rebuildBloom() {
  187. // recreate bloom and dispose old as this setting is not dynamic
  188. var oldBloom = this.bloom;
  189. this.bloom = new BloomEffect(this._scene, this.bloomScale, this._bloomWeight, this.bloomKernel, this._defaultPipelineTextureType, false);
  190. this.bloom.threshold = oldBloom.threshold;
  191. for (var i = 0; i < this._cameras.length; i++) {
  192. oldBloom.disposeEffects(this._cameras[i]);
  193. }
  194. }
  195. /**
  196. * If the depth of field is enabled.
  197. */
  198. @serialize()
  199. public get depthOfFieldEnabled(): boolean {
  200. return this._depthOfFieldEnabled;
  201. }
  202. public set depthOfFieldEnabled(enabled: boolean) {
  203. if (this._depthOfFieldEnabled === enabled) {
  204. return;
  205. }
  206. this._depthOfFieldEnabled = enabled;
  207. this._buildPipeline();
  208. }
  209. /**
  210. * Blur level of the depth of field effect. (A higher blur level will affect performance.)
  211. */
  212. @serialize()
  213. public get depthOfFieldBlurLevel(): DepthOfFieldEffectBlurLevel {
  214. return this._depthOfFieldBlurLevel;
  215. }
  216. public set depthOfFieldBlurLevel(value: DepthOfFieldEffectBlurLevel) {
  217. if (this._depthOfFieldBlurLevel === value) {
  218. return;
  219. }
  220. this._depthOfFieldBlurLevel = value;
  221. // recreate dof and dispose old as this setting is not dynamic
  222. var oldDof = this.depthOfField;
  223. this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, false);
  224. this.depthOfField.focalLength = oldDof.focalLength;
  225. this.depthOfField.focusDistance = oldDof.focusDistance;
  226. this.depthOfField.fStop = oldDof.fStop;
  227. this.depthOfField.lensSize = oldDof.lensSize;
  228. for (var i = 0; i < this._cameras.length; i++) {
  229. oldDof.disposeEffects(this._cameras[i]);
  230. }
  231. this._buildPipeline();
  232. }
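// Example (illustrative, not part of the original file): enabling depth of field and tuning the
// lens parameters of DepthOfFieldEffect that are carried over above when the blur level changes.
//   pipeline.depthOfFieldEnabled = true;
//   pipeline.depthOfFieldBlurLevel = DepthOfFieldEffectBlurLevel.Medium;
//   pipeline.depthOfField.focusDistance = 2000;
//   pipeline.depthOfField.focalLength = 50;
//   pipeline.depthOfField.fStop = 1.4;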
  233. /**
  234. * If FXAA anti-aliasing is enabled.
  235. */
  236. public set fxaaEnabled(enabled: boolean) {
  237. if (this._fxaaEnabled === enabled) {
  238. return;
  239. }
  240. this._fxaaEnabled = enabled;
  241. this._buildPipeline();
  242. }
  243. @serialize()
  244. public get fxaaEnabled(): boolean {
  245. return this._fxaaEnabled;
  246. }
  247. private _samples = 1;
  248. /**
  249. * MSAA sample count; setting this to 4 will provide 4x anti-aliasing. (default: 1)
  250. */
  251. public set samples(sampleCount: number) {
  252. if (this._samples === sampleCount) {
  253. return;
  254. }
  255. this._samples = sampleCount;
  256. this._buildPipeline();
  257. }
  258. @serialize()
  259. public get samples(): number {
  260. return this._samples;
  261. }
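// Example (illustrative, not part of the original file): the two anti-aliasing options. MSAA needs a
// WebGL 2 context (see the warning in _buildPipeline), FXAA runs as a post process everywhere.
//   pipeline.samples = 4;       // 4x MSAA on the first post process
//   pipeline.fxaaEnabled = true;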
  262. /**
  263. * If image processing is enabled.
  264. */
  265. public set imageProcessingEnabled(enabled: boolean) {
  266. if (this._imageProcessingEnabled === enabled) {
  267. return;
  268. }
  269. this._imageProcessingEnabled = enabled;
  270. this._buildPipeline();
  271. }
  272. @serialize()
  273. public get imageProcessingEnabled(): boolean {
  274. return this._imageProcessingEnabled;
  275. }
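// Example (illustrative, not part of the original file): when image processing runs as a post process
// (HDR pipelines), exposure can be driven through the scene's imageProcessingConfiguration, which is
// also observed in the constructor to keep the bloom downscale pass in sync.
//   pipeline.imageProcessingEnabled = true;
//   scene.imageProcessingConfiguration.exposure = 1.2;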
  276. /**
  277. * If the glow layer is enabled. (Adds a glow effect to emissive materials)
  278. */
  279. public set glowLayerEnabled(enabled: boolean) {
  280. if (enabled && !this._glowLayer) {
  281. this._glowLayer = new GlowLayer("", this._scene);
  282. } else if (!enabled && this._glowLayer) {
  283. this._glowLayer.dispose();
  284. this._glowLayer = null;
  285. }
  286. }
  287. @serialize()
  288. public get glowLayerEnabled(): boolean {
  289. return this._glowLayer != null;
  290. }
  291. /**
  292. * Enable or disable the chromaticAberration process from the pipeline
  293. */
  294. public set chromaticAberrationEnabled(enabled: boolean) {
  295. if (this._chromaticAberrationEnabled === enabled) {
  296. return;
  297. }
  298. this._chromaticAberrationEnabled = enabled;
  299. this._buildPipeline();
  300. }
  301. @serialize()
  302. public get chromaticAberrationEnabled(): boolean {
  303. return this._chromaticAberrationEnabled;
  304. }
  305. /**
  306. * Enable or disable the grain process from the pipeline
  307. */
  308. public set grainEnabled(enabled: boolean) {
  309. if (this._grainEnabled === enabled) {
  310. return;
  311. }
  312. this._grainEnabled = enabled;
  313. this._buildPipeline();
  314. }
  315. @serialize()
  316. public get grainEnabled(): boolean {
  317. return this._grainEnabled;
  318. }
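// Example (illustrative, not part of the original file): the remaining toggles. Once enabled, the
// underlying post processes exposed above can be tuned directly; the parameter names below
// (aberrationAmount, intensity) are assumptions based on the respective post process classes.
//   pipeline.sharpenEnabled = true;
//   pipeline.glowLayerEnabled = true;
//   pipeline.chromaticAberrationEnabled = true;
//   pipeline.chromaticAberration.aberrationAmount = 30;
//   pipeline.grainEnabled = true;
//   pipeline.grain.intensity = 10;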
  319. /**
  320. * @constructor
  321. * @param name - The rendering pipeline name (default: "")
  322. * @param hdr - If high dynamic range textures should be used (default: true)
  323. * @param scene - The scene linked to this pipeline (default: the last created scene)
  324. * @param cameras - The array of cameras that the rendering pipeline will be attached to (default: scene.cameras)
  325. * @param automaticBuild - if false, you will have to manually call prepare() to update the pipeline (default: true)
  326. */
  327. constructor(name: string = "", hdr: boolean = true, scene: Scene = Engine.LastCreatedScene!, cameras?: Camera[], automaticBuild = true) {
  328. super(scene.getEngine(), name);
  329. this._cameras = cameras || scene.cameras;
  330. this._cameras = this._cameras.slice();
  331. this._camerasToBeAttached = this._cameras.slice();
  332. this._buildAllowed = automaticBuild;
  333. // Initialize
  334. this._scene = scene;
  335. var caps = this._scene.getEngine().getCaps();
  336. this._hdr = hdr && (caps.textureHalfFloatRender || caps.textureFloatRender);
  337. // Misc
  338. if (this._hdr) {
  339. if (caps.textureHalfFloatRender) {
  340. this._defaultPipelineTextureType = Engine.TEXTURETYPE_HALF_FLOAT;
  341. }
  342. else if (caps.textureFloatRender) {
  343. this._defaultPipelineTextureType = Engine.TEXTURETYPE_FLOAT;
  344. }
  345. } else {
  346. this._defaultPipelineTextureType = Engine.TEXTURETYPE_UNSIGNED_INT;
  347. }
  348. // Attach
  349. scene.postProcessRenderPipelineManager.addPipeline(this);
  350. var engine = this._scene.getEngine();
  351. // Create the post processes beforehand so they can be modified before being enabled.
  352. // The block compilation flag is set to true to avoid compiling prior to use; these will be updated on first use when the pipeline is built.
  353. this.sharpen = new SharpenPostProcess("sharpen", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
  354. this._sharpenEffect = new PostProcessRenderEffect(engine, this.SharpenPostProcessId, () => { return this.sharpen; }, true);
  355. this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, true);
  356. this.bloom = new BloomEffect(this._scene, this._bloomScale, this._bloomWeight, this.bloomKernel, this._defaultPipelineTextureType, true);
  357. this.chromaticAberration = new ChromaticAberrationPostProcess("ChromaticAberration", engine.getRenderWidth(), engine.getRenderHeight(), 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
  358. this._chromaticAberrationEffect = new PostProcessRenderEffect(engine, this.ChromaticAberrationPostProcessId, () => { return this.chromaticAberration; }, true);
  359. this.grain = new GrainPostProcess("Grain", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
  360. this._grainEffect = new PostProcessRenderEffect(engine, this.GrainPostProcessId, () => { return this.grain; }, true);
  361. this._resizeObserver = engine.onResizeObservable.add(() => {
  362. this._hardwareScaleLevel = engine.getHardwareScalingLevel();
  363. this.bloomKernel = this.bloomKernel;
  364. });
  365. this._imageProcessingConfigurationObserver = this._scene.imageProcessingConfiguration.onUpdateParameters.add(() => {
  366. this.bloom._downscale._exposure = this._scene.imageProcessingConfiguration.exposure;
  367. });
  368. this._buildPipeline();
  369. }
  370. /**
  371. * Force the compilation of the entire pipeline.
  372. */
  373. public prepare(): void {
  374. let previousState = this._buildAllowed;
  375. this._buildAllowed = true;
  376. this._buildPipeline();
  377. this._buildAllowed = previousState;
  378. }
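// Example (illustrative, not part of the original file): deferring the build. With automaticBuild set
// to false in the constructor, effects can be configured first and wired up in a single pass.
//   const pipeline = new DefaultRenderingPipeline("default", true, scene, scene.cameras, false);
//   pipeline.bloomEnabled = true;
//   pipeline.depthOfFieldEnabled = true;
//   pipeline.prepare(); // builds the pipeline once with every requested effect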
  379. private _hasCleared = false;
  380. private _prevPostProcess: Nullable<PostProcess> = null;
  381. private _prevPrevPostProcess: Nullable<PostProcess> = null;
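// Track the last two post processes added so that _setAutoClearAndTextureSharing can have each new
// pass share its output texture with the pass two steps back, ping-ponging between two render
// targets instead of allocating a new one per pass.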
  382. private _setAutoClearAndTextureSharing(postProcess: PostProcess, skipTextureSharing = false) {
  383. if (this._hasCleared) {
  384. postProcess.autoClear = false;
  385. } else {
  386. postProcess.autoClear = true;
  387. this._scene.autoClear = false;
  388. this._hasCleared = true;
  389. }
  390. if (!skipTextureSharing) {
  391. if (this._prevPrevPostProcess) {
  392. postProcess.shareOutputWith(this._prevPrevPostProcess);
  393. } else {
  394. postProcess.useOwnOutput();
  395. }
  396. if (this._prevPostProcess) {
  397. this._prevPrevPostProcess = this._prevPostProcess;
  398. }
  399. this._prevPostProcess = postProcess;
  400. }
  401. }
  402. private _depthOfFieldSceneObserver: Nullable<Observer<Scene>> = null;
  403. private _buildPipeline() {
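// Rebuild sequence: dispose the recreated post processes, detach the cameras, reset the pipeline,
// then re-add the enabled effects in a fixed order (depth of field, bloom, image processing,
// sharpen, grain, chromatic aberration, FXAA) before reattaching the cameras and enabling MSAA on
// the first post process.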
  404. if (!this._buildAllowed) {
  405. return;
  406. }
  407. this._scene.autoClear = true;
  408. var engine = this._scene.getEngine();
  409. this._disposePostProcesses();
  410. if (this._cameras !== null) {
  411. this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
  412. // get back cameras to be used to reattach pipeline
  413. this._cameras = this._camerasToBeAttached.slice();
  414. }
  415. this._reset();
  416. this._prevPostProcess = null;
  417. this._prevPrevPostProcess = null;
  418. this._hasCleared = false;
  419. if (this.depthOfFieldEnabled) {
  420. // Multi camera support
  421. if (this._cameras.length > 1) {
  422. for (let camera of this._cameras) {
  423. const depthRenderer = this._scene.enableDepthRenderer(camera);
  424. depthRenderer.useOnlyInActiveCamera = true;
  425. }
  426. this._depthOfFieldSceneObserver = this._scene.onAfterRenderTargetsRenderObservable.add((scene) => {
  427. if (this._cameras.indexOf(scene.activeCamera!) > -1) {
  428. this.depthOfField.depthTexture = scene.enableDepthRenderer(scene.activeCamera).getDepthMap();
  429. }
  430. });
  431. }
  432. else {
  433. this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver);
  434. const depthRenderer = this._scene.enableDepthRenderer(this._cameras[0]);
  435. this.depthOfField.depthTexture = depthRenderer.getDepthMap();
  436. }
  437. if (!this.depthOfField._isReady()) {
  438. this.depthOfField._updateEffects();
  439. }
  440. this.addEffect(this.depthOfField);
  441. this._setAutoClearAndTextureSharing(this.depthOfField._effects[0], true);
  442. }
  443. else {
  444. this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver);
  445. }
  446. if (this.bloomEnabled) {
  447. if (!this.bloom._isReady()) {
  448. this.bloom._updateEffects();
  449. }
  450. this.addEffect(this.bloom);
  451. this._setAutoClearAndTextureSharing(this.bloom._effects[0], true);
  452. }
  453. if (this._imageProcessingEnabled) {
  454. this.imageProcessing = new ImageProcessingPostProcess("imageProcessing", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
  455. if (this._hdr) {
  456. this.addEffect(new PostProcessRenderEffect(engine, this.ImageProcessingPostProcessId, () => { return this.imageProcessing; }, true));
  457. this._setAutoClearAndTextureSharing(this.imageProcessing);
  458. } else {
  459. this._scene.imageProcessingConfiguration.applyByPostProcess = false;
  460. }
  461. }
  462. if (this.sharpenEnabled) {
  463. if (!this.sharpen.isReady()) {
  464. this.sharpen.updateEffect();
  465. }
  466. this.addEffect(this._sharpenEffect);
  467. this._setAutoClearAndTextureSharing(this.sharpen);
  468. }
  469. if (this.grainEnabled) {
  470. if (!this.grain.isReady()) {
  471. this.grain.updateEffect();
  472. }
  473. this.addEffect(this._grainEffect);
  474. this._setAutoClearAndTextureSharing(this.grain);
  475. }
  476. if (this.chromaticAberrationEnabled) {
  477. if (!this.chromaticAberration.isReady()) {
  478. this.chromaticAberration.updateEffect();
  479. }
  480. this.addEffect(this._chromaticAberrationEffect);
  481. this._setAutoClearAndTextureSharing(this.chromaticAberration);
  482. }
  483. if (this.fxaaEnabled) {
  484. this.fxaa = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
  485. this.addEffect(new PostProcessRenderEffect(engine, this.FxaaPostProcessId, () => { return this.fxaa; }, true));
  486. this._setAutoClearAndTextureSharing(this.fxaa, true);
  487. }
  488. if (this._cameras !== null) {
  489. this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
  490. }
  491. if (!this._enableMSAAOnFirstPostProcess(this.samples) && this.samples > 1) {
  492. Tools.Warn("MSAA failed to enable; MSAA is only supported in browsers that support WebGL 2.0 or higher");
  493. }
  494. }
  495. private _disposePostProcesses(disposeNonRecreated = false): void {
  496. for (var i = 0; i < this._cameras.length; i++) {
  497. var camera = this._cameras[i];
  498. if (this.imageProcessing) {
  499. this.imageProcessing.dispose(camera);
  500. }
  501. if (this.fxaa) {
  502. this.fxaa.dispose(camera);
  503. }
  504. // These are created in the constructor and should not be disposed on every pipeline change
  505. if (disposeNonRecreated) {
  506. if (this.sharpen) {
  507. this.sharpen.dispose(camera);
  508. }
  509. if (this.depthOfField) {
  510. this._scene.onAfterRenderTargetsRenderObservable.remove(this._depthOfFieldSceneObserver);
  511. this.depthOfField.disposeEffects(camera);
  512. }
  513. if (this.bloom) {
  514. this.bloom.disposeEffects(camera);
  515. }
  516. if (this.chromaticAberration) {
  517. this.chromaticAberration.dispose(camera);
  518. }
  519. if (this.grain) {
  520. this.grain.dispose(camera);
  521. }
  522. if (this._glowLayer) {
  523. this._glowLayer.dispose();
  524. }
  525. }
  526. }
  527. (<any>this.imageProcessing) = null;
  528. (<any>this.fxaa) = null;
  529. if (disposeNonRecreated) {
  530. (<any>this.sharpen) = null;
  531. (<any>this._sharpenEffect) = null;
  532. (<any>this.depthOfField) = null;
  533. (<any>this.bloom) = null;
  534. (<any>this.chromaticAberration) = null;
  535. (<any>this._chromaticAberrationEffect) = null;
  536. (<any>this.grain) = null;
  537. (<any>this._grainEffect) = null;
  538. this._glowLayer = null;
  539. }
  540. }
  541. /**
  542. * Adds a camera to the pipeline
  543. * @param camera the camera to be added
  544. */
  545. public addCamera(camera: Camera): void {
  546. this._camerasToBeAttached.push(camera);
  547. this._buildPipeline();
  548. }
  549. /**
  550. * Removes a camera from the pipeline
  551. * @param camera the camera to remove
  552. */
  553. public removeCamera(camera: Camera): void {
  554. var index = this._camerasToBeAttached.indexOf(camera);
  555. this._camerasToBeAttached.splice(index, 1);
  556. this._buildPipeline();
  557. }
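// Example (illustrative, not part of the original file): attaching the pipeline to an extra camera
// at runtime and detaching it again; each call triggers a rebuild.
//   pipeline.addCamera(secondCamera);
//   pipeline.removeCamera(secondCamera);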
  558. /**
  559. * Dispose of the pipeline and stop all post processes
  560. */
  561. public dispose(): void {
  562. this._disposePostProcesses(true);
  563. this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
  564. this._scene.autoClear = true;
  565. if (this._resizeObserver) {
  566. this._scene.getEngine().onResizeObservable.remove(this._resizeObserver);
  567. this._resizeObserver = null;
  568. }
  569. this._scene.imageProcessingConfiguration.onUpdateParameters.remove(this._imageProcessingConfigurationObserver);
  570. super.dispose();
  571. }
  572. /**
  573. * Serialize the rendering pipeline (Used when exporting)
  574. * @returns the serialized object
  575. */
  576. public serialize(): any {
  577. var serializationObject = SerializationHelper.Serialize(this);
  578. serializationObject.customType = "DefaultRenderingPipeline";
  579. return serializationObject;
  580. }
  581. /**
  582. * Parse the serialized pipeline
  583. * @param source Source pipeline.
  584. * @param scene The scene to load the pipeline to.
  585. * @param rootUrl The URL of the serialized pipeline.
  586. * @returns An instantiated pipeline from the serialized object.
  587. */
  588. public static Parse(source: any, scene: Scene, rootUrl: string): DefaultRenderingPipeline {
  589. return SerializationHelper.Parse(() => new DefaultRenderingPipeline(source._name, source._hdr, scene), source, scene, rootUrl);
  590. }
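// Example (illustrative, not part of the original file): round-tripping the pipeline through its
// serialized form.
//   const data = pipeline.serialize();
//   const restored = DefaultRenderingPipeline.Parse(data, scene, "");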
  591. }