// standardRenderingPipeline.ts
  1. import { Nullable } from "types";
  2. import { serialize, serializeAsTexture, SerializationHelper } from "Misc/decorators";
  3. import { IAnimatable } from "Misc/tools";
  4. import { Logger } from "Misc/logger";
  5. import { Vector2, Vector3, Matrix, Vector4 } from "Maths/math";
  6. import { Scalar } from "Maths/math.scalar";
  7. import { Camera } from "Cameras/camera";
  8. import { Effect } from "Materials/effect";
  9. import { Texture } from "Materials/Textures/texture";
  10. import { PostProcess } from "PostProcesses/postProcess";
  11. import { PostProcessRenderPipeline } from "PostProcesses/RenderPipeline/postProcessRenderPipeline";
  12. import { PostProcessRenderEffect } from "PostProcesses/RenderPipeline/postProcessRenderEffect";
  13. import { BlurPostProcess } from "PostProcesses/blurPostProcess";
  14. import { FxaaPostProcess } from "PostProcesses/fxaaPostProcess";
  15. import { IDisposable } from "scene";
  16. import { SpotLight } from "Lights/spotLight";
  17. import { DirectionalLight } from "Lights/directionalLight";
  18. import { GeometryBufferRenderer } from "Rendering/geometryBufferRenderer";
  19. import { Scene } from "scene";
  20. import { Animation } from "Animations/animation";
  21. import { Constants } from "Engines/constants";
  22. /**
  23. * Standard rendering pipeline
  24. * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
  25. * @see https://doc.babylonjs.com/how_to/using_standard_rendering_pipeline
  26. */
  27. export class StandardRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
    /**
     * Public members
     */
    // Post-processes
    /**
     * Post-process which contains the original scene color before the pipeline applies all the effects
     */
    public originalPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to down scale an image x4
     */
    public downSampleX4PostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to calculate the illuminated surfaces controlled by a threshold
     */
    public brightPassPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process array storing all the horizontal blur post-processes used by the pipeline
     */
    public blurHPostProcesses: PostProcess[] = [];
    /**
     * Post-process array storing all the vertical blur post-processes used by the pipeline
     */
    public blurVPostProcesses: PostProcess[] = [];
    /**
     * Post-process used to add colors of 2 textures (typically brightness + real scene color)
     */
    public textureAdderPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create volumetric lighting effect
     */
    public volumetricLightPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to smooth the previous volumetric light post-process on the X axis
     */
    public volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess> = null;
    /**
     * Post-process used to smooth the previous volumetric light post-process on the Y axis
     */
    public volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess> = null;
    /**
     * Post-process used to merge the volumetric light effect and the real scene color
     */
    // NOTE(review): name is missing a trailing "s" ("...PostProces") — kept as-is since it is public API.
    public volumetricLightMergePostProces: Nullable<PostProcess> = null;
    /**
     * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose)
     */
    public volumetricLightFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Base post-process used to calculate the average luminance of the final image for HDR
     */
    public luminancePostProcess: Nullable<PostProcess> = null;
    /**
     * Post-processes used to create down sample post-processes in order to get
     * the average luminance of the final image for HDR
     * Array of length "StandardRenderingPipeline.LuminanceSteps"
     */
    public luminanceDownSamplePostProcesses: PostProcess[] = [];
    /**
     * Post-process used to create a HDR effect (light adaptation)
     */
    public hdrPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to store the final texture adder post-process (attach/detach for debug purpose)
     */
    public textureAdderFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to store the final lens flare post-process (attach/detach for debug purpose)
     */
    public lensFlareFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to merge the final HDR post-process and the real scene color
     */
    public hdrFinalPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create a lens flare effect
     */
    public lensFlarePostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process that merges the result of the lens flare post-process and the real scene color
     */
    public lensFlareComposePostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create a motion blur effect
     */
    public motionBlurPostProcess: Nullable<PostProcess> = null;
    /**
     * Post-process used to create a depth of field effect
     */
    public depthOfFieldPostProcess: Nullable<PostProcess> = null;
    /**
     * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
     */
    public fxaaPostProcess: Nullable<FxaaPostProcess> = null;
    // Values
    /**
     * Represents the brightness threshold in order to configure the illuminated surfaces
     */
    @serialize()
    public brightThreshold: number = 1.0;
    /**
     * Configures the blur intensity used for overexposed / highlighted surfaces (light halo)
     */
    @serialize()
    public blurWidth: number = 512.0;
    /**
     * Sets if the blur for highlighted surfaces must be only horizontal
     */
    @serialize()
    public horizontalBlur: boolean = false;
    /**
     * Sets the overall exposure used by the pipeline
     */
    @serialize()
    public exposure: number = 1.0;
    /**
     * Texture used typically to simulate "dirty" on camera lens
     */
    @serializeAsTexture("lensTexture")
    public lensTexture: Nullable<Texture> = null;
    /**
     * Represents the offset coefficient based on Rayleigh principle. Typically in interval [-0.2, 0.2]
     */
    @serialize()
    public volumetricLightCoefficient: number = 0.2;
    /**
     * The overall power of volumetric lights, typically in interval [0, 10] maximum
     */
    @serialize()
    public volumetricLightPower: number = 4.0;
    /**
     * Used to set the blur intensity to smooth the volumetric lights
     */
    @serialize()
    public volumetricLightBlurScale: number = 64.0;
    /**
     * Light (spot or directional) used to generate the volumetric lights rays
     * The source light must have a shadow generator so the pipeline can get its
     * depth map
     */
    public sourceLight: Nullable<SpotLight | DirectionalLight> = null;
    /**
     * For eye adaptation, represents the minimum luminance the eye can see
     */
    @serialize()
    public hdrMinimumLuminance: number = 1.0;
    /**
     * For eye adaptation, represents the decrease luminance speed
     */
    @serialize()
    public hdrDecreaseRate: number = 0.5;
    /**
     * For eye adaptation, represents the increase luminance speed
     */
    @serialize()
    public hdrIncreaseRate: number = 0.5;
    /**
     * Lens color texture used by the lens flare effect. Mandatory if lens flare effect enabled
     */
    @serializeAsTexture("lensColorTexture")
    public lensColorTexture: Nullable<Texture> = null;
    /**
     * The overall strength for the lens flare effect
     */
    @serialize()
    public lensFlareStrength: number = 20.0;
    /**
     * Dispersion coefficient for lens flare ghosts
     */
    @serialize()
    public lensFlareGhostDispersal: number = 1.4;
    /**
     * Main lens flare halo width
     */
    @serialize()
    public lensFlareHaloWidth: number = 0.7;
    /**
     * Based on the lens distortion effect, defines how much the lens flare result
     * is distorted
     */
    @serialize()
    public lensFlareDistortionStrength: number = 16.0;
    /**
     * Lens star texture must be used to simulate rays on the flares and is available
     * in the documentation
     */
    @serializeAsTexture("lensStarTexture")
    public lensStarTexture: Nullable<Texture> = null;
    /**
     * As the "lensTexture" (can be the same texture or different), it is used to apply the lens
     * flare effect by taking account of the dirt texture
     */
    @serializeAsTexture("lensFlareDirtTexture")
    public lensFlareDirtTexture: Nullable<Texture> = null;
    /**
     * Represents the focal length for the depth of field effect
     */
    @serialize()
    public depthOfFieldDistance: number = 10.0;
    /**
     * Represents the blur intensity for the blurred part of the depth of field effect
     */
    @serialize()
    public depthOfFieldBlurWidth: number = 64.0;
    /**
     * For motion blur, defines how much the image is blurred by the movement
     */
    @serialize()
    public motionStrength: number = 1.0;
    /**
     * List of animations for the pipeline (IAnimatable implementation)
     */
    public animations: Animation[] = [];
    /**
     * Private members
     */
    private _scene: Scene;
    // Post-process the depth-of-field pass should read from; updated by onApply callbacks of earlier passes.
    private _currentDepthOfFieldSource: Nullable<PostProcess> = null;
    // Custom original color post-process supplied to the constructor, if any.
    private _basePostProcess: Nullable<PostProcess>;
    // NOTE(review): presumably the running average luminance used by the HDR eye-adaptation pass — set elsewhere in this file.
    private _hdrCurrentLuminance: number = 1.0;
    // TEXTURETYPE_FLOAT when float render targets are supported, otherwise TEXTURETYPE_HALF_FLOAT (see constructor).
    private _floatTextureType: number;
    @serialize()
    private _ratio: number;
    // Getters and setters
    private _bloomEnabled: boolean = false;
    private _depthOfFieldEnabled: boolean = false;
    private _vlsEnabled: boolean = false;
    private _lensFlareEnabled: boolean = false;
    private _hdrEnabled: boolean = false;
    private _motionBlurEnabled: boolean = false;
    private _fxaaEnabled: boolean = false;
    private _motionBlurSamples: number = 64.0;
    private _volumetricLightStepsCount: number = 50.0;
    private _samples: number = 1;
  262. /**
  263. * @ignore
  264. * Specifies if the bloom pipeline is enabled
  265. */
  266. @serialize()
  267. public get BloomEnabled(): boolean {
  268. return this._bloomEnabled;
  269. }
  270. public set BloomEnabled(enabled: boolean) {
  271. if (this._bloomEnabled === enabled) {
  272. return;
  273. }
  274. this._bloomEnabled = enabled;
  275. this._buildPipeline();
  276. }
  277. /**
  278. * @ignore
  279. * Specifies if the depth of field pipeline is enabed
  280. */
  281. @serialize()
  282. public get DepthOfFieldEnabled(): boolean {
  283. return this._depthOfFieldEnabled;
  284. }
  285. public set DepthOfFieldEnabled(enabled: boolean) {
  286. if (this._depthOfFieldEnabled === enabled) {
  287. return;
  288. }
  289. this._depthOfFieldEnabled = enabled;
  290. this._buildPipeline();
  291. }
  292. /**
  293. * @ignore
  294. * Specifies if the lens flare pipeline is enabed
  295. */
  296. @serialize()
  297. public get LensFlareEnabled(): boolean {
  298. return this._lensFlareEnabled;
  299. }
  300. public set LensFlareEnabled(enabled: boolean) {
  301. if (this._lensFlareEnabled === enabled) {
  302. return;
  303. }
  304. this._lensFlareEnabled = enabled;
  305. this._buildPipeline();
  306. }
  307. /**
  308. * @ignore
  309. * Specifies if the HDR pipeline is enabled
  310. */
  311. @serialize()
  312. public get HDREnabled(): boolean {
  313. return this._hdrEnabled;
  314. }
  315. public set HDREnabled(enabled: boolean) {
  316. if (this._hdrEnabled === enabled) {
  317. return;
  318. }
  319. this._hdrEnabled = enabled;
  320. this._buildPipeline();
  321. }
  322. /**
  323. * @ignore
  324. * Specifies if the volumetric lights scattering effect is enabled
  325. */
  326. @serialize()
  327. public get VLSEnabled(): boolean {
  328. return this._vlsEnabled;
  329. }
  330. public set VLSEnabled(enabled) {
  331. if (this._vlsEnabled === enabled) {
  332. return;
  333. }
  334. if (enabled) {
  335. var geometry = this._scene.enableGeometryBufferRenderer();
  336. if (!geometry) {
  337. Logger.Warn("Geometry renderer is not supported, cannot create volumetric lights in Standard Rendering Pipeline");
  338. return;
  339. }
  340. }
  341. this._vlsEnabled = enabled;
  342. this._buildPipeline();
  343. }
  344. /**
  345. * @ignore
  346. * Specifies if the motion blur effect is enabled
  347. */
  348. @serialize()
  349. public get MotionBlurEnabled(): boolean {
  350. return this._motionBlurEnabled;
  351. }
  352. public set MotionBlurEnabled(enabled: boolean) {
  353. if (this._motionBlurEnabled === enabled) {
  354. return;
  355. }
  356. this._motionBlurEnabled = enabled;
  357. this._buildPipeline();
  358. }
  359. /**
  360. * Specifies if anti-aliasing is enabled
  361. */
  362. @serialize()
  363. public get fxaaEnabled(): boolean {
  364. return this._fxaaEnabled;
  365. }
  366. public set fxaaEnabled(enabled: boolean) {
  367. if (this._fxaaEnabled === enabled) {
  368. return;
  369. }
  370. this._fxaaEnabled = enabled;
  371. this._buildPipeline();
  372. }
  373. /**
  374. * Specifies the number of steps used to calculate the volumetric lights
  375. * Typically in interval [50, 200]
  376. */
  377. @serialize()
  378. public get volumetricLightStepsCount(): number {
  379. return this._volumetricLightStepsCount;
  380. }
  381. public set volumetricLightStepsCount(count: number) {
  382. if (this.volumetricLightPostProcess) {
  383. this.volumetricLightPostProcess.updateEffect("#define VLS\n#define NB_STEPS " + count.toFixed(1));
  384. }
  385. this._volumetricLightStepsCount = count;
  386. }
  387. /**
  388. * Specifies the number of samples used for the motion blur effect
  389. * Typically in interval [16, 64]
  390. */
  391. @serialize()
  392. public get motionBlurSamples(): number {
  393. return this._motionBlurSamples;
  394. }
  395. public set motionBlurSamples(samples: number) {
  396. if (this.motionBlurPostProcess) {
  397. this.motionBlurPostProcess.updateEffect("#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + samples.toFixed(1));
  398. }
  399. this._motionBlurSamples = samples;
  400. }
  401. /**
  402. * Specifies MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
  403. */
  404. @serialize()
  405. public get samples(): number {
  406. return this._samples;
  407. }
  408. public set samples(sampleCount: number) {
  409. if (this._samples === sampleCount) {
  410. return;
  411. }
  412. this._samples = sampleCount;
  413. this._buildPipeline();
  414. }
  415. /**
  416. * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
  417. * @constructor
  418. * @param name The rendering pipeline name
  419. * @param scene The scene linked to this pipeline
  420. * @param ratio The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5)
  421. * @param originalPostProcess the custom original color post-process. Must be "reusable". Can be null.
  422. * @param cameras The array of cameras that the rendering pipeline will be attached to
  423. */
  424. constructor(name: string, scene: Scene, ratio: number, originalPostProcess: Nullable<PostProcess> = null, cameras?: Camera[]) {
  425. super(scene.getEngine(), name);
  426. this._cameras = cameras || [];
  427. // Initialize
  428. this._scene = scene;
  429. this._basePostProcess = originalPostProcess;
  430. this._ratio = ratio;
  431. // Misc
  432. this._floatTextureType = scene.getEngine().getCaps().textureFloatRender ? Constants.TEXTURETYPE_FLOAT : Constants.TEXTURETYPE_HALF_FLOAT;
  433. // Finish
  434. scene.postProcessRenderPipelineManager.addPipeline(this);
  435. this._buildPipeline();
  436. }
  437. private _buildPipeline(): void {
  438. var ratio = this._ratio;
  439. var scene = this._scene;
  440. this._disposePostProcesses();
  441. this._reset();
  442. // Create pass post-process
  443. if (!this._basePostProcess) {
  444. this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Constants.TEXTURE_BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType);
  445. this.originalPostProcess.onApply = () => {
  446. this._currentDepthOfFieldSource = this.originalPostProcess;
  447. };
  448. }
  449. else {
  450. this.originalPostProcess = this._basePostProcess;
  451. }
  452. if (this._bloomEnabled || this._vlsEnabled || this._lensFlareEnabled || this._depthOfFieldEnabled || this._motionBlurEnabled) {
  453. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => { return this.originalPostProcess; }, true));
  454. }
  455. this._currentDepthOfFieldSource = this.originalPostProcess;
  456. if (this._bloomEnabled) {
  457. // Create down sample X4 post-process
  458. this._createDownSampleX4PostProcess(scene, ratio / 2);
  459. // Create bright pass post-process
  460. this._createBrightPassPostProcess(scene, ratio / 2);
  461. // Create gaussian blur post-processes (down sampling blurs)
  462. this._createBlurPostProcesses(scene, ratio / 4, 1);
  463. // Create texture adder post-process
  464. this._createTextureAdderPostProcess(scene, ratio);
  465. // Create depth-of-field source post-process
  466. this.textureAdderFinalPostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  467. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBaseDepthOfFieldSource", () => { return this.textureAdderFinalPostProcess; }, true));
  468. }
  469. if (this._vlsEnabled) {
  470. // Create volumetric light
  471. this._createVolumetricLightPostProcess(scene, ratio);
  472. // Create volumetric light final post-process
  473. this.volumetricLightFinalPostProcess = new PostProcess("HDRVLSFinal", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  474. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSFinal", () => { return this.volumetricLightFinalPostProcess; }, true));
  475. }
  476. if (this._lensFlareEnabled) {
  477. // Create lens flare post-process
  478. this._createLensFlarePostProcess(scene, ratio);
  479. // Create depth-of-field source post-process post lens-flare and disable it now
  480. this.lensFlareFinalPostProcess = new PostProcess("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  481. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => { return this.lensFlareFinalPostProcess; }, true));
  482. }
  483. if (this._hdrEnabled) {
  484. // Create luminance
  485. this._createLuminancePostProcesses(scene, this._floatTextureType);
  486. // Create HDR
  487. this._createHdrPostProcess(scene, ratio);
  488. // Create depth-of-field source post-process post hdr and disable it now
  489. this.hdrFinalPostProcess = new PostProcess("HDRPostHDReDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  490. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostHDReDepthOfFieldSource", () => { return this.hdrFinalPostProcess; }, true));
  491. }
  492. if (this._depthOfFieldEnabled) {
  493. // Create gaussian blur used by depth-of-field
  494. this._createBlurPostProcesses(scene, ratio / 2, 3, "depthOfFieldBlurWidth");
  495. // Create depth-of-field post-process
  496. this._createDepthOfFieldPostProcess(scene, ratio);
  497. }
  498. if (this._motionBlurEnabled) {
  499. // Create motion blur post-process
  500. this._createMotionBlurPostProcess(scene, ratio);
  501. }
  502. if (this._fxaaEnabled) {
  503. // Create fxaa post-process
  504. this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
  505. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true));
  506. }
  507. if (this._cameras !== null) {
  508. this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
  509. }
  510. if (!this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1) {
  511. Logger.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
  512. }
  513. }
  514. // Down Sample X4 Post-Processs
  515. private _createDownSampleX4PostProcess(scene: Scene, ratio: number): void {
  516. var downSampleX4Offsets = new Array<number>(32);
  517. this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", Constants.TEXTURETYPE_UNSIGNED_INT);
  518. this.downSampleX4PostProcess.onApply = (effect: Effect) => {
  519. var id = 0;
  520. let width = (<PostProcess>this.downSampleX4PostProcess).width;
  521. let height = (<PostProcess>this.downSampleX4PostProcess).height;
  522. for (var i = -2; i < 2; i++) {
  523. for (var j = -2; j < 2; j++) {
  524. downSampleX4Offsets[id] = (i + 0.5) * (1.0 / width);
  525. downSampleX4Offsets[id + 1] = (j + 0.5) * (1.0 / height);
  526. id += 2;
  527. }
  528. }
  529. effect.setArray2("dsOffsets", downSampleX4Offsets);
  530. };
  531. // Add to pipeline
  532. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDownSampleX4", () => { return this.downSampleX4PostProcess; }, true));
  533. }
  534. // Brightpass Post-Process
  535. private _createBrightPassPostProcess(scene: Scene, ratio: number): void {
  536. var brightOffsets = new Array<number>(8);
  537. this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", Constants.TEXTURETYPE_UNSIGNED_INT);
  538. this.brightPassPostProcess.onApply = (effect: Effect) => {
  539. var sU = (1.0 / (<PostProcess>this.brightPassPostProcess).width);
  540. var sV = (1.0 / (<PostProcess>this.brightPassPostProcess).height);
  541. brightOffsets[0] = -0.5 * sU;
  542. brightOffsets[1] = 0.5 * sV;
  543. brightOffsets[2] = 0.5 * sU;
  544. brightOffsets[3] = 0.5 * sV;
  545. brightOffsets[4] = -0.5 * sU;
  546. brightOffsets[5] = -0.5 * sV;
  547. brightOffsets[6] = 0.5 * sU;
  548. brightOffsets[7] = -0.5 * sV;
  549. effect.setArray2("dsOffsets", brightOffsets);
  550. effect.setFloat("brightThreshold", this.brightThreshold);
  551. };
  552. // Add to pipeline
  553. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBrightPass", () => { return this.brightPassPostProcess; }, true));
  554. }
  555. // Create blur H&V post-processes
  556. private _createBlurPostProcesses(scene: Scene, ratio: number, indice: number, blurWidthKey: string = "blurWidth"): void {
  557. var engine = scene.getEngine();
  558. var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
  559. var blurY = new BlurPostProcess("HDRBlurV" + "_" + indice, new Vector2(0, 1), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
  560. blurX.onActivateObservable.add(() => {
  561. let dw = blurX.width / engine.getRenderWidth();
  562. blurX.kernel = (<any>this)[blurWidthKey] * dw;
  563. });
  564. blurY.onActivateObservable.add(() => {
  565. let dw = blurY.height / engine.getRenderHeight();
  566. blurY.kernel = this.horizontalBlur ? 64 * dw : (<any>this)[blurWidthKey] * dw;
  567. });
  568. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurH" + indice, () => { return blurX; }, true));
  569. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurV" + indice, () => { return blurY; }, true));
  570. this.blurHPostProcesses.push(blurX);
  571. this.blurVPostProcesses.push(blurY);
  572. }
  573. // Create texture adder post-process
  574. private _createTextureAdderPostProcess(scene: Scene, ratio: number): void {
  575. this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", Constants.TEXTURETYPE_UNSIGNED_INT);
  576. this.textureAdderPostProcess.onApply = (effect: Effect) => {
  577. effect.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? this._currentDepthOfFieldSource : this.originalPostProcess);
  578. effect.setTexture("lensSampler", this.lensTexture);
  579. effect.setFloat("exposure", this.exposure);
  580. this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess;
  581. };
  582. // Add to pipeline
  583. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true));
  584. }
    /**
     * Creates the volumetric light scattering (VLS) post-processes: the ray-marching pass,
     * its smoothing blurs, and the merge with the scene color.
     * Requires the geometry buffer renderer (for per-pixel world positions) and a
     * sourceLight with a shadow generator (for the shadow map and light matrices).
     */
    private _createVolumetricLightPostProcess(scene: Scene, ratio: number): void {
        var geometryRenderer = <GeometryBufferRenderer>scene.enableGeometryBufferRenderer();
        // The VLS shader samples world positions from the G-buffer.
        geometryRenderer.enablePosition = true;
        var geometry = geometryRenderer.getGBuffer();
        // Base post-process (rendered at 1/8 resolution, then smoothed and merged below)
        this.volumetricLightPostProcess = new PostProcess("HDRVLS", "standard",
            ["shadowViewProjection", "cameraPosition", "sunDirection", "sunColor", "scatteringCoefficient", "scatteringPower", "depthValues"],
            ["shadowMapSampler", "positionSampler"],
            ratio / 8,
            null,
            Texture.BILINEAR_SAMPLINGMODE,
            scene.getEngine(),
            false, "#define VLS\n#define NB_STEPS " + this._volumetricLightStepsCount.toFixed(1));
        // Reused across frames to avoid per-frame allocation.
        var depthValues = Vector2.Zero();
        this.volumetricLightPostProcess.onApply = (effect: Effect) => {
            // Uniforms are only bound when a light with a shadow generator and an active camera exist;
            // otherwise the pass runs with stale/default uniforms.
            if (this.sourceLight && this.sourceLight.getShadowGenerator() && this._scene.activeCamera) {
                var generator = this.sourceLight.getShadowGenerator()!;
                effect.setTexture("shadowMapSampler", generator.getShadowMap());
                // textures[2] is the position texture enabled via enablePosition above.
                effect.setTexture("positionSampler", geometry.textures[2]);
                effect.setColor3("sunColor", this.sourceLight.diffuse);
                effect.setVector3("sunDirection", this.sourceLight.getShadowDirection());
                effect.setVector3("cameraPosition", this._scene.activeCamera.globalPosition);
                effect.setMatrix("shadowViewProjection", generator.getTransformMatrix());
                effect.setFloat("scatteringCoefficient", this.volumetricLightCoefficient);
                effect.setFloat("scatteringPower", this.volumetricLightPower);
                // Near/far depth range of the light as seen from the active camera.
                depthValues.x = this.sourceLight.getDepthMinZ(this._scene.activeCamera);
                depthValues.y = this.sourceLight.getDepthMaxZ(this._scene.activeCamera);
                effect.setVector2("depthValues", depthValues);
            }
        };
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLS", () => { return this.volumetricLightPostProcess; }, true));
        // Smooth the (low resolution) VLS result
        this._createBlurPostProcesses(scene, ratio / 4, 0, "volumetricLightBlurScale");
        // Merge the smoothed rays with the scene color
        this.volumetricLightMergePostProces = new PostProcess("HDRVLSMerge", "standard", [], ["originalSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define VLSMERGE");
        this.volumetricLightMergePostProces.onApply = (effect: Effect) => {
            // Merge on top of the bloom result when bloom is enabled, otherwise the raw scene color.
            effect.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? this.textureAdderFinalPostProcess : this.originalPostProcess);
            this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess;
        };
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSMerge", () => { return this.volumetricLightMergePostProces; }, true));
    }
    // Create luminance
    private _createLuminancePostProcesses(scene: Scene, textureType: number): void {
        // Create luminance
        // First pass renders scene luminance into a (3^LuminanceSteps)² target;
        // each down-sample pass below then divides the edge size by 3 until 1x1.
        var size = Math.pow(3, StandardRenderingPipeline.LuminanceSteps);
        this.luminancePostProcess = new PostProcess("HDRLuminance", "standard", ["lumOffsets"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LUMINANCE", textureType);
        var offsets: number[] = [];
        this.luminancePostProcess.onApply = (effect: Effect) => {
            // Four half-texel offsets (a 2x2 tap pattern) for the LUMINANCE shader,
            // recomputed every frame in case the post-process was resized.
            var sU = (1.0 / (<PostProcess>this.luminancePostProcess).width);
            var sV = (1.0 / (<PostProcess>this.luminancePostProcess).height);
            offsets[0] = -0.5 * sU;
            offsets[1] = 0.5 * sV;
            offsets[2] = 0.5 * sU;
            offsets[3] = 0.5 * sV;
            offsets[4] = -0.5 * sU;
            offsets[5] = -0.5 * sV;
            offsets[6] = 0.5 * sU;
            offsets[7] = -0.5 * sV;
            effect.setArray2("lumOffsets", offsets);
        };
        // Add to pipeline
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminance", () => { return this.luminancePostProcess; }, true));
        // Create down sample luminance
        // One post-process per step, sized 3^i; the i === 0 pass is flagged as
        // the final down-sampler so its shader can emit the packed 1x1 result.
        for (var i = StandardRenderingPipeline.LuminanceSteps - 1; i >= 0; i--) {
            var size = Math.pow(3, i);
            var defines = "#define LUMINANCE_DOWN_SAMPLE\n";
            if (i === 0) {
                defines += "#define FINAL_DOWN_SAMPLER";
            }
            var postProcess = new PostProcess("HDRLuminanceDownSample" + i, "standard", ["dsOffsets", "halfDestPixelSize"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines, textureType);
            this.luminanceDownSamplePostProcesses.push(postProcess);
        }
        // Create callbacks and add effects
        // lastLuminance always points at the previous (larger) pass whose
        // dimensions the current pass samples; after the last down-sample it is
        // rewound to the full luminance pass so the chain restarts next frame.
        var lastLuminance: Nullable<PostProcess> = this.luminancePostProcess;
        this.luminanceDownSamplePostProcesses.forEach((pp, index) => {
            var downSampleOffsets = new Array<number>(18);
            pp.onApply = (effect: Effect) => {
                if (!lastLuminance) {
                    return;
                }
                // Build a 3x3 grid of texel offsets into the previous pass
                // (9 taps * 2 components = 18 floats).
                var id = 0;
                for (var x = -1; x < 2; x++) {
                    for (var y = -1; y < 2; y++) {
                        downSampleOffsets[id] = x / lastLuminance.width;
                        downSampleOffsets[id + 1] = y / lastLuminance.height;
                        id += 2;
                    }
                }
                effect.setArray2("dsOffsets", downSampleOffsets);
                effect.setFloat("halfDestPixelSize", 0.5 / lastLuminance.width);
                if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                    lastLuminance = this.luminancePostProcess;
                } else {
                    lastLuminance = pp;
                }
            };
            if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                pp.onAfterRender = () => {
                    // Read back the final 1x1 pixel and unpack it with the usual
                    // bit-shift constants; the /100 presumably mirrors a *100
                    // scale in the packing shader — verify against the shader.
                    // NOTE(review): assumes readPixels returns the buffer
                    // synchronously — newer engines return a Promise; confirm
                    // the engine version this file targets.
                    var pixel = scene.getEngine().readPixels(0, 0, 1, 1);
                    var bit_shift = new Vector4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);
                    this._hdrCurrentLuminance = (pixel[0] * bit_shift.x + pixel[1] * bit_shift.y + pixel[2] * bit_shift.z + pixel[3] * bit_shift.w) / 100.0;
                };
            }
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminanceDownSample" + index, () => { return pp; }, true));
        });
    }
  691. // Create HDR post-process
  692. private _createHdrPostProcess(scene: Scene, ratio: number): void {
  693. this.hdrPostProcess = new PostProcess("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define HDR", Constants.TEXTURETYPE_UNSIGNED_INT);
  694. var outputLiminance = 1;
  695. var time = 0;
  696. var lastTime = 0;
  697. this.hdrPostProcess.onApply = (effect: Effect) => {
  698. effect.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource);
  699. time += scene.getEngine().getDeltaTime();
  700. if (outputLiminance < 0) {
  701. outputLiminance = this._hdrCurrentLuminance;
  702. } else {
  703. var dt = (lastTime - time) / 1000.0;
  704. if (this._hdrCurrentLuminance < outputLiminance + this.hdrDecreaseRate * dt) {
  705. outputLiminance += this.hdrDecreaseRate * dt;
  706. }
  707. else if (this._hdrCurrentLuminance > outputLiminance - this.hdrIncreaseRate * dt) {
  708. outputLiminance -= this.hdrIncreaseRate * dt;
  709. }
  710. else {
  711. outputLiminance = this._hdrCurrentLuminance;
  712. }
  713. }
  714. outputLiminance = Scalar.Clamp(outputLiminance, this.hdrMinimumLuminance, 1e20);
  715. effect.setFloat("averageLuminance", outputLiminance);
  716. lastTime = time;
  717. this._currentDepthOfFieldSource = this.hdrFinalPostProcess;
  718. };
  719. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDR", () => { return this.hdrPostProcess; }, true));
  720. }
  721. // Create lens flare post-process
  722. private _createLensFlarePostProcess(scene: Scene, ratio: number): void {
  723. this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", Constants.TEXTURETYPE_UNSIGNED_INT);
  724. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlare", () => { return this.lensFlarePostProcess; }, true));
  725. this._createBlurPostProcesses(scene, ratio / 4, 2);
  726. this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", Constants.TEXTURETYPE_UNSIGNED_INT);
  727. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlareCompose", () => { return this.lensFlareComposePostProcess; }, true));
  728. var resolution = new Vector2(0, 0);
  729. // Lens flare
  730. this.lensFlarePostProcess.onApply = (effect: Effect) => {
  731. effect.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess);
  732. effect.setTexture("lensColorSampler", this.lensColorTexture);
  733. effect.setFloat("strength", this.lensFlareStrength);
  734. effect.setFloat("ghostDispersal", this.lensFlareGhostDispersal);
  735. effect.setFloat("haloWidth", this.lensFlareHaloWidth);
  736. // Shift
  737. resolution.x = (<PostProcess>this.lensFlarePostProcess).width;
  738. resolution.y = (<PostProcess>this.lensFlarePostProcess).height;
  739. effect.setVector2("resolution", resolution);
  740. effect.setFloat("distortionStrength", this.lensFlareDistortionStrength);
  741. };
  742. // Compose
  743. var scaleBias1 = Matrix.FromValues(
  744. 2.0, 0.0, -1.0, 0.0,
  745. 0.0, 2.0, -1.0, 0.0,
  746. 0.0, 0.0, 1.0, 0.0,
  747. 0.0, 0.0, 0.0, 1.0
  748. );
  749. var scaleBias2 = Matrix.FromValues(
  750. 0.5, 0.0, 0.5, 0.0,
  751. 0.0, 0.5, 0.5, 0.0,
  752. 0.0, 0.0, 1.0, 0.0,
  753. 0.0, 0.0, 0.0, 1.0
  754. );
  755. this.lensFlareComposePostProcess.onApply = (effect: Effect) => {
  756. if (!this._scene.activeCamera) {
  757. return;
  758. }
  759. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  760. effect.setTexture("lensDirtSampler", this.lensFlareDirtTexture);
  761. effect.setTexture("lensStarSampler", this.lensStarTexture);
  762. // Lens start rotation matrix
  763. var camerax = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(0));
  764. var cameraz = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(2));
  765. var camRot = Vector3.Dot(camerax.toVector3(), new Vector3(1.0, 0.0, 0.0)) + Vector3.Dot(cameraz.toVector3(), new Vector3(0.0, 0.0, 1.0));
  766. camRot *= 4.0;
  767. var starRotation = Matrix.FromValues(
  768. Math.cos(camRot) * 0.5, -Math.sin(camRot), 0.0, 0.0,
  769. Math.sin(camRot), Math.cos(camRot) * 0.5, 0.0, 0.0,
  770. 0.0, 0.0, 1.0, 0.0,
  771. 0.0, 0.0, 0.0, 1.0
  772. );
  773. var lensStarMatrix = scaleBias2.multiply(starRotation).multiply(scaleBias1);
  774. effect.setMatrix("lensStarMatrix", lensStarMatrix);
  775. this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess;
  776. };
  777. }
  778. // Create depth-of-field post-process
  779. private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void {
  780. this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Constants.TEXTURETYPE_UNSIGNED_INT);
  781. this.depthOfFieldPostProcess.onApply = (effect: Effect) => {
  782. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  783. effect.setTexture("depthSampler", this._getDepthTexture());
  784. effect.setFloat("distance", this.depthOfFieldDistance);
  785. };
  786. // Add to pipeline
  787. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfField", () => { return this.depthOfFieldPostProcess; }, true));
  788. }
  789. // Create motion blur post-process
  790. private _createMotionBlurPostProcess(scene: Scene, ratio: number): void {
  791. this.motionBlurPostProcess = new PostProcess("HDRMotionBlur", "standard",
  792. ["inverseViewProjection", "prevViewProjection", "screenSize", "motionScale", "motionStrength"],
  793. ["depthSampler"],
  794. ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + this.motionBlurSamples.toFixed(1), Constants.TEXTURETYPE_UNSIGNED_INT);
  795. var motionScale: number = 0;
  796. var prevViewProjection = Matrix.Identity();
  797. var invViewProjection = Matrix.Identity();
  798. var viewProjection = Matrix.Identity();
  799. var screenSize = Vector2.Zero();
  800. this.motionBlurPostProcess.onApply = (effect: Effect) => {
  801. viewProjection = scene.getProjectionMatrix().multiply(scene.getViewMatrix());
  802. viewProjection.invertToRef(invViewProjection);
  803. effect.setMatrix("inverseViewProjection", invViewProjection);
  804. effect.setMatrix("prevViewProjection", prevViewProjection);
  805. prevViewProjection = viewProjection;
  806. screenSize.x = (<PostProcess>this.motionBlurPostProcess).width;
  807. screenSize.y = (<PostProcess>this.motionBlurPostProcess).height;
  808. effect.setVector2("screenSize", screenSize);
  809. motionScale = scene.getEngine().getFps() / 60.0;
  810. effect.setFloat("motionScale", motionScale);
  811. effect.setFloat("motionStrength", this.motionStrength);
  812. effect.setTexture("depthSampler", this._getDepthTexture());
  813. };
  814. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRMotionBlur", () => { return this.motionBlurPostProcess; }, true));
  815. }
  816. private _getDepthTexture(): Texture {
  817. if (this._scene.getEngine().getCaps().drawBuffersExtension) {
  818. let renderer = <GeometryBufferRenderer>this._scene.enableGeometryBufferRenderer();
  819. return renderer.getGBuffer().textures[0];
  820. }
  821. return this._scene.enableDepthRenderer().getDepthMap();
  822. }
  823. private _disposePostProcesses(): void {
  824. for (var i = 0; i < this._cameras.length; i++) {
  825. var camera = this._cameras[i];
  826. if (this.originalPostProcess) { this.originalPostProcess.dispose(camera); }
  827. if (this.downSampleX4PostProcess) { this.downSampleX4PostProcess.dispose(camera); }
  828. if (this.brightPassPostProcess) { this.brightPassPostProcess.dispose(camera); }
  829. if (this.textureAdderPostProcess) { this.textureAdderPostProcess.dispose(camera); }
  830. if (this.textureAdderFinalPostProcess) { this.textureAdderFinalPostProcess.dispose(camera); }
  831. if (this.volumetricLightPostProcess) { this.volumetricLightPostProcess.dispose(camera); }
  832. if (this.volumetricLightSmoothXPostProcess) { this.volumetricLightSmoothXPostProcess.dispose(camera); }
  833. if (this.volumetricLightSmoothYPostProcess) { this.volumetricLightSmoothYPostProcess.dispose(camera); }
  834. if (this.volumetricLightMergePostProces) { this.volumetricLightMergePostProces.dispose(camera); }
  835. if (this.volumetricLightFinalPostProcess) { this.volumetricLightFinalPostProcess.dispose(camera); }
  836. if (this.lensFlarePostProcess) { this.lensFlarePostProcess.dispose(camera); }
  837. if (this.lensFlareComposePostProcess) { this.lensFlareComposePostProcess.dispose(camera); }
  838. for (var j = 0; j < this.luminanceDownSamplePostProcesses.length; j++) {
  839. this.luminanceDownSamplePostProcesses[j].dispose(camera);
  840. }
  841. if (this.luminancePostProcess) { this.luminancePostProcess.dispose(camera); }
  842. if (this.hdrPostProcess) { this.hdrPostProcess.dispose(camera); }
  843. if (this.hdrFinalPostProcess) { this.hdrFinalPostProcess.dispose(camera); }
  844. if (this.depthOfFieldPostProcess) { this.depthOfFieldPostProcess.dispose(camera); }
  845. if (this.motionBlurPostProcess) { this.motionBlurPostProcess.dispose(camera); }
  846. if (this.fxaaPostProcess) { this.fxaaPostProcess.dispose(camera); }
  847. for (var j = 0; j < this.blurHPostProcesses.length; j++) {
  848. this.blurHPostProcesses[j].dispose(camera);
  849. }
  850. for (var j = 0; j < this.blurVPostProcesses.length; j++) {
  851. this.blurVPostProcesses[j].dispose(camera);
  852. }
  853. }
  854. this.originalPostProcess = null;
  855. this.downSampleX4PostProcess = null;
  856. this.brightPassPostProcess = null;
  857. this.textureAdderPostProcess = null;
  858. this.textureAdderFinalPostProcess = null;
  859. this.volumetricLightPostProcess = null;
  860. this.volumetricLightSmoothXPostProcess = null;
  861. this.volumetricLightSmoothYPostProcess = null;
  862. this.volumetricLightMergePostProces = null;
  863. this.volumetricLightFinalPostProcess = null;
  864. this.lensFlarePostProcess = null;
  865. this.lensFlareComposePostProcess = null;
  866. this.luminancePostProcess = null;
  867. this.hdrPostProcess = null;
  868. this.hdrFinalPostProcess = null;
  869. this.depthOfFieldPostProcess = null;
  870. this.motionBlurPostProcess = null;
  871. this.fxaaPostProcess = null;
  872. this.luminanceDownSamplePostProcesses = [];
  873. this.blurHPostProcesses = [];
  874. this.blurVPostProcesses = [];
  875. }
    /**
     * Dispose of the pipeline and stop all post processes
     */
    public dispose(): void {
        // Tear the post-processes down first, then detach the cameras that
        // were attached to this pipeline before the base class cleans up.
        this._disposePostProcesses();
        this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
        super.dispose();
    }
  884. /**
  885. * Serialize the rendering pipeline (Used when exporting)
  886. * @returns the serialized object
  887. */
  888. public serialize(): any {
  889. var serializationObject = SerializationHelper.Serialize(this);
  890. if (this.sourceLight) {
  891. serializationObject.sourceLightId = this.sourceLight.id;
  892. }
  893. serializationObject.customType = "StandardRenderingPipeline";
  894. return serializationObject;
  895. }
  896. /**
  897. * Parse the serialized pipeline
  898. * @param source Source pipeline.
  899. * @param scene The scene to load the pipeline to.
  900. * @param rootUrl The URL of the serialized pipeline.
  901. * @returns An instantiated pipeline from the serialized object.
  902. */
  903. public static Parse(source: any, scene: Scene, rootUrl: string): StandardRenderingPipeline {
  904. var p = SerializationHelper.Parse(() => new StandardRenderingPipeline(source._name, scene, source._ratio), source, scene, rootUrl);
  905. if (source.sourceLightId) {
  906. p.sourceLight = <SpotLight | DirectionalLight> scene.getLightByID(source.sourceLightId);
  907. }
  908. return p;
  909. }
    /**
     * Luminance steps
     * Number of luminance down-sample passes; the initial luminance render
     * target is 3^LuminanceSteps pixels on a side (see
     * _createLuminancePostProcesses).
     */
    public static LuminanceSteps: number = 6;
  914. }