babylon.standardRenderingPipeline.ts 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075
module BABYLON {
    export class StandardRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
        /**
         * Public members
         */
        // Post-processes
        /**
         * Post-process which contains the original scene color before the pipeline applies all the effects
         */
        public originalPostProcess: Nullable<PostProcess>;
        /**
         * Post-process used to down scale an image x4
         */
        public downSampleX4PostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to calculate the illuminated surfaces controlled by a threshold
         */
        public brightPassPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process array storing all the horizontal blur post-processes used by the pipeline
         */
        public blurHPostProcesses: PostProcess[] = [];
        /**
         * Post-process array storing all the vertical blur post-processes used by the pipeline
         */
        public blurVPostProcesses: PostProcess[] = [];
        /**
         * Post-process used to add colors of 2 textures (typically brightness + real scene color)
         */
        public textureAdderPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to create volumetric lighting effect
         */
        public volumetricLightPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to smooth the previous volumetric light post-process on the X axis
         */
        public volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess> = null;
        /**
         * Post-process used to smooth the previous volumetric light post-process on the Y axis
         */
        public volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess> = null;
        /**
         * Post-process used to merge the volumetric light effect and the real scene color
         * NOTE(review): identifier is missing the final "s" ("PostProces"); kept as-is since it is public API
         */
        public volumetricLightMergePostProces: Nullable<PostProcess> = null;
        /**
         * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose)
         */
        public volumetricLightFinalPostProcess: Nullable<PostProcess> = null;
        /**
         * Base post-process used to calculate the average luminance of the final image for HDR
         */
        public luminancePostProcess: Nullable<PostProcess> = null;
        /**
         * Post-processes used to create down sample post-processes in order to get
         * the average luminance of the final image for HDR
         * Array of length "StandardRenderingPipeline.LuminanceSteps"
         */
        public luminanceDownSamplePostProcesses: PostProcess[] = [];
        /**
         * Post-process used to create a HDR effect (light adaptation)
         */
        public hdrPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to store the final texture adder post-process (attach/detach for debug purpose)
         */
        public textureAdderFinalPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to store the final lens flare post-process (attach/detach for debug purpose)
         */
        public lensFlareFinalPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to merge the final HDR post-process and the real scene color
         */
        public hdrFinalPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to create a lens flare effect
         */
        public lensFlarePostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process that merges the result of the lens flare post-process and the real scene color
         */
        public lensFlareComposePostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to create a motion blur effect
         */
        public motionBlurPostProcess: Nullable<PostProcess> = null;
        /**
         * Post-process used to create a depth of field effect
         */
        public depthOfFieldPostProcess: Nullable<PostProcess> = null;
        /**
         * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
         */
        public fxaaPostProcess: Nullable<FxaaPostProcess> = null;
        // Values
        /**
         * Represents the brightness threshold in order to configure the illuminated surfaces
         */
        @serialize()
        public brightThreshold: number = 1.0;
        /**
         * Configures the blur intensity used for overexposed/highlighted surfaces (light halo)
         */
        @serialize()
        public blurWidth: number = 512.0;
        /**
         * Sets if the blur for highlighted surfaces must be only horizontal
         */
        @serialize()
        public horizontalBlur: boolean = false;
        /**
         * Sets the overall exposure used by the pipeline
         */
        @serialize()
        public exposure: number = 1.0;
        /**
         * Texture used typically to simulate "dirty" on camera lens
         */
        @serializeAsTexture("lensTexture")
        public lensTexture: Nullable<Texture> = null;
        /**
         * Represents the offset coefficient based on Rayleigh principle. Typically in interval [-0.2, 0.2]
         */
        @serialize()
        public volumetricLightCoefficient: number = 0.2;
        /**
         * The overall power of volumetric lights, typically in interval [0, 10] maximum
         */
        @serialize()
        public volumetricLightPower: number = 4.0;
        /**
         * Used to set the blur intensity to smooth the volumetric lights
         */
        @serialize()
        public volumetricLightBlurScale: number = 64.0;
        /**
         * Light (spot or directional) used to generate the volumetric lights rays
         * The source light must have a shadow generator so the pipeline can get its
         * depth map
         */
        public sourceLight: Nullable<SpotLight |  DirectionalLight> = null;
        /**
         * For eye adaptation, represents the minimum luminance the eye can see
         */
        @serialize()
        public hdrMinimumLuminance: number = 1.0;
        /**
         * For eye adaptation, represents the decrease luminance speed
         */
        @serialize()
        public hdrDecreaseRate: number = 0.5;
        /**
         * For eye adaptation, represents the increase luminance speed
         */
        @serialize()
        public hdrIncreaseRate: number = 0.5;
        /**
         * Lens color texture used by the lens flare effect. Mandatory if lens flare effect enabled
         */
        @serializeAsTexture("lensColorTexture")
        public lensColorTexture: Nullable<Texture> = null;
        /**
         * The overall strength for the lens flare effect
         */
        @serialize()
        public lensFlareStrength: number = 20.0;
        /**
         * Dispersion coefficient for lens flare ghosts
         */
        @serialize()
        public lensFlareGhostDispersal: number = 1.4;
        /**
         * Main lens flare halo width
         */
        @serialize()
        public lensFlareHaloWidth: number = 0.7;
        /**
         * Based on the lens distortion effect, defines how much the lens flare result
         * is distorted
         */
        @serialize()
        public lensFlareDistortionStrength: number = 16.0;
        /**
         * Lens star texture must be used to simulate rays on the flares and is available
         * in the documentation
         */
        @serializeAsTexture("lensStarTexture")
        public lensStarTexture: Nullable<Texture> = null;
        /**
         * As the "lensTexture" (can be the same texture or different), it is used to apply the lens
         * flare effect by taking account of the dirt texture
         */
        @serializeAsTexture("lensFlareDirtTexture")
        public lensFlareDirtTexture: Nullable<Texture> = null;
        /**
         * Represents the focal length for the depth of field effect
         */
        @serialize()
        public depthOfFieldDistance: number = 10.0;
        /**
         * Represents the blur intensity for the blurred part of the depth of field effect
         */
        @serialize()
        public depthOfFieldBlurWidth: number = 64.0;
        /**
         * For motion blur, defines how much the image is blurred by the movement
         */
        @serialize()
        public motionStrength: number = 1.0;
        /**
         * List of animations for the pipeline (IAnimatable implementation)
         */
        public animations: Animation[] = [];
        /**
         * Private members
         */
        // Scene this pipeline is attached to
        private _scene: Scene;
        // Post-process currently acting as the input of the depth-of-field pass; re-pointed as effects are chained
        private _currentDepthOfFieldSource: Nullable<PostProcess> = null;
        // Custom original color post-process supplied by the caller (constructor argument), if any
        private _basePostProcess: Nullable<PostProcess>;
        // Last luminance value read back from the 1x1 down-sample chain (eye adaptation)
        private _hdrCurrentLuminance: number = 1.0;
        // TEXTURETYPE_FLOAT when float render targets are supported, TEXTURETYPE_HALF_FLOAT otherwise
        private _floatTextureType: number;
        @serialize()
        private _ratio: number;
        // Getters and setters
        private _bloomEnabled: boolean = false;
        private _depthOfFieldEnabled: boolean = false;
        private _vlsEnabled: boolean = false;
        private _lensFlareEnabled: boolean = false;
        private _hdrEnabled: boolean = false;
        private _motionBlurEnabled: boolean = false;
        private _fxaaEnabled: boolean = false;
        private _motionBlurSamples: number = 64.0;
        private _volumetricLightStepsCount: number = 50.0;
        private _samples: number = 1;
  237. /**
  238. * Specifies if the bloom pipeline is enabled
  239. */
  240. @serialize()
  241. public get BloomEnabled(): boolean {
  242. return this._bloomEnabled;
  243. }
  244. public set BloomEnabled(enabled: boolean) {
  245. if (this._bloomEnabled === enabled) {
  246. return;
  247. }
  248. this._bloomEnabled = enabled;
  249. this._buildPipeline();
  250. }
  251. /**
  252. * Specifies if the depth of field pipeline is enabed
  253. */
  254. @serialize()
  255. public get DepthOfFieldEnabled(): boolean {
  256. return this._depthOfFieldEnabled;
  257. }
  258. public set DepthOfFieldEnabled(enabled: boolean) {
  259. if (this._depthOfFieldEnabled === enabled) {
  260. return;
  261. }
  262. this._depthOfFieldEnabled = enabled;
  263. this._buildPipeline();
  264. }
  265. /**
  266. * Specifies if the lens flare pipeline is enabed
  267. */
  268. @serialize()
  269. public get LensFlareEnabled(): boolean {
  270. return this._lensFlareEnabled;
  271. }
  272. public set LensFlareEnabled(enabled: boolean) {
  273. if (this._lensFlareEnabled === enabled) {
  274. return;
  275. }
  276. this._lensFlareEnabled = enabled;
  277. this._buildPipeline();
  278. }
  279. /**
  280. * Specifies if the HDR pipeline is enabled
  281. */
  282. @serialize()
  283. public get HDREnabled(): boolean {
  284. return this._hdrEnabled;
  285. }
  286. public set HDREnabled(enabled: boolean) {
  287. if (this._hdrEnabled === enabled) {
  288. return;
  289. }
  290. this._hdrEnabled = enabled;
  291. this._buildPipeline();
  292. }
  293. /**
  294. * Specifies if the volumetric lights scattering effect is enabled
  295. */
  296. @serialize()
  297. public get VLSEnabled(): boolean {
  298. return this._vlsEnabled;
  299. }
  300. public set VLSEnabled(enabled) {
  301. if (this._vlsEnabled === enabled) {
  302. return;
  303. }
  304. if (enabled) {
  305. var geometry = this._scene.enableGeometryBufferRenderer();
  306. if (!geometry) {
  307. Tools.Warn("Geometry renderer is not supported, cannot create volumetric lights in Standard Rendering Pipeline");
  308. return;
  309. }
  310. }
  311. this._vlsEnabled = enabled;
  312. this._buildPipeline();
  313. }
  314. /**
  315. * Specifies if the motion blur effect is enabled
  316. */
  317. @serialize()
  318. public get MotionBlurEnabled(): boolean {
  319. return this._motionBlurEnabled;
  320. }
  321. public set MotionBlurEnabled(enabled: boolean) {
  322. if (this._motionBlurEnabled === enabled) {
  323. return;
  324. }
  325. this._motionBlurEnabled = enabled;
  326. this._buildPipeline();
  327. }
  328. /**
  329. * Specifies if anti-aliasing is enabled
  330. */
  331. @serialize()
  332. public get fxaaEnabled(): boolean {
  333. return this._fxaaEnabled;
  334. }
  335. public set fxaaEnabled(enabled: boolean) {
  336. if (this._fxaaEnabled === enabled) {
  337. return;
  338. }
  339. this._fxaaEnabled = enabled;
  340. this._buildPipeline();
  341. }
  342. /**
  343. * Specifies the number of steps used to calculate the volumetric lights
  344. * Typically in interval [50, 200]
  345. */
  346. @serialize()
  347. public get volumetricLightStepsCount(): number {
  348. return this._volumetricLightStepsCount;
  349. }
  350. public set volumetricLightStepsCount(count: number)  {
  351. if (this.volumetricLightPostProcess) {
  352. this.volumetricLightPostProcess.updateEffect("#define VLS\n#define NB_STEPS " + count.toFixed(1));
  353. }
  354. this._volumetricLightStepsCount = count;
  355. }
  356. /**
  357. * Specifies the number of samples used for the motion blur effect
  358. * Typically in interval [16, 64]
  359. */
  360. @serialize()
  361. public get motionBlurSamples(): number {
  362. return this._motionBlurSamples;
  363. }
  364. public set motionBlurSamples(samples: number) {
  365. if (this.motionBlurPostProcess) {
  366. this.motionBlurPostProcess.updateEffect("#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + samples.toFixed(1));
  367. }
  368. this._motionBlurSamples = samples;
  369. }
  370. /**
  371. * Specifies MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
  372. */
  373. @serialize()
  374. public get samples(): number {
  375. return this._samples;
  376. }
  377. public set samples(sampleCount: number) {
  378. if (this._samples === sampleCount) {
  379. return;
  380. }
  381. this._samples = sampleCount;
  382. this._buildPipeline();
  383. }
  384. /**
  385. * @constructor
  386. * @param {string} name - The rendering pipeline name
  387. * @param {BABYLON.Scene} scene - The scene linked to this pipeline
  388. * @param {any} ratio - The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5)
  389. * @param {BABYLON.PostProcess} originalPostProcess - the custom original color post-process. Must be "reusable". Can be null.
  390. * @param {BABYLON.Camera[]} cameras - The array of cameras that the rendering pipeline will be attached to
  391. */
  392. constructor(name: string, scene: Scene, ratio: number, originalPostProcess: Nullable<PostProcess> = null, cameras?: Camera[]) {
  393. super(scene.getEngine(), name);
  394. this._cameras = cameras ||  [];
  395. // Initialize
  396. this._scene = scene;
  397. this._basePostProcess = originalPostProcess;
  398. this._ratio = ratio;
  399. // Misc
  400. this._floatTextureType = scene.getEngine().getCaps().textureFloatRender ? Engine.TEXTURETYPE_FLOAT : Engine.TEXTURETYPE_HALF_FLOAT;
  401. // Finish
  402. scene.postProcessRenderPipelineManager.addPipeline(this);
  403. this._buildPipeline();
  404. }
        /**
         * Disposes the previously-built chain and re-creates every enabled effect in order:
         * pass -> bloom -> volumetric lights -> lens flare -> HDR -> depth of field -> motion blur -> FXAA.
         * Called from the constructor and from every enable/disable setter.
         */
        private _buildPipeline(): void {
            var ratio = this._ratio;
            var scene = this._scene;
            // Tear down any previously created post-processes before rebuilding
            this._disposePostProcesses();
            this._reset();
            // Create pass post-process
            if (!this._basePostProcess) {
                this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType);
                this.originalPostProcess.onApply = (effect: Effect) => {
                    this._currentDepthOfFieldSource = this.originalPostProcess;
                };
            }
            else {
                // Caller supplied a custom original color post-process
                this.originalPostProcess = this._basePostProcess;
            }
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => { return this.originalPostProcess; }, true));
            this._currentDepthOfFieldSource = this.originalPostProcess;
            if (this._bloomEnabled) {
                // Create down sample X4 post-process
                this._createDownSampleX4PostProcess(scene, ratio / 2);
                // Create bright pass post-process
                this._createBrightPassPostProcess(scene, ratio / 2);
                // Create gaussian blur post-processes (down sampling blurs)
                this._createBlurPostProcesses(scene, ratio / 4, 1);
                // Create texture adder post-process
                this._createTextureAdderPostProcess(scene, ratio);
                // Create depth-of-field source post-process
                this.textureAdderFinalPostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBaseDepthOfFieldSource", () => { return this.textureAdderFinalPostProcess; }, true));
            }
            if (this._vlsEnabled) {
                // Create volumetric light
                this._createVolumetricLightPostProcess(scene, ratio);
                // Create volumetric light final post-process
                this.volumetricLightFinalPostProcess = new PostProcess("HDRVLSFinal", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSFinal", () => { return this.volumetricLightFinalPostProcess; }, true));
            }
            if (this._lensFlareEnabled) {
                // Create lens flare post-process
                this._createLensFlarePostProcess(scene, ratio);
                // Create depth-of-field source post-process post lens-flare and disable it now
                this.lensFlareFinalPostProcess = new PostProcess("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => { return this.lensFlareFinalPostProcess; }, true));
            }
            if (this._hdrEnabled) {
                // Create luminance
                this._createLuminancePostProcesses(scene, this._floatTextureType);
                // Create HDR
                this._createHdrPostProcess(scene, ratio);
                // Create depth-of-field source post-process post hdr and disable it now
                this.hdrFinalPostProcess = new PostProcess("HDRPostHDReDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostHDReDepthOfFieldSource", () => { return this.hdrFinalPostProcess; }, true));
            }
            if (this._depthOfFieldEnabled) {
                // Create gaussian blur used by depth-of-field
                this._createBlurPostProcesses(scene, ratio / 2, 3, "depthOfFieldBlurWidth");
                // Create depth-of-field post-process
                this._createDepthOfFieldPostProcess(scene, ratio);
            }
            if (this._motionBlurEnabled) {
                // Create motion blur post-process
                this._createMotionBlurPostProcess(scene, ratio);
            }
            if (this._fxaaEnabled) {
                // Create fxaa post-process
                this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true));
            }
            // Re-attach the cameras that were attached before the rebuild
            if (this._cameras !== null) {
                this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
            }
            // MSAA requires WebGL 2; warn when the requested sample count could not be honored
            if (!this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1){
                BABYLON.Tools.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
            }
        }
  480. // Down Sample X4 Post-Processs
  481. private _createDownSampleX4PostProcess(scene: Scene, ratio: number): void {
  482. var downSampleX4Offsets = new Array<number>(32);
  483. this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", Engine.TEXTURETYPE_UNSIGNED_INT);
  484. this.downSampleX4PostProcess.onApply = (effect: Effect) => {
  485. var id = 0;
  486. let width = (<PostProcess>this.downSampleX4PostProcess).width;
  487. let height = (<PostProcess>this.downSampleX4PostProcess).height;
  488. for (var i = -2; i < 2; i++) {
  489. for (var j = -2; j < 2; j++) {
  490. downSampleX4Offsets[id] = (i + 0.5) * (1.0 / width);
  491. downSampleX4Offsets[id + 1] = (j + 0.5) * (1.0 / height);
  492. id += 2;
  493. }
  494. }
  495. effect.setArray2("dsOffsets", downSampleX4Offsets);
  496. };
  497. // Add to pipeline
  498. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDownSampleX4", () => { return this.downSampleX4PostProcess; }, true));
  499. }
  500. // Brightpass Post-Process
  501. private _createBrightPassPostProcess(scene: Scene, ratio: number): void {
  502. var brightOffsets = new Array<number>(8);
  503. this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", Engine.TEXTURETYPE_UNSIGNED_INT);
  504. this.brightPassPostProcess.onApply = (effect: Effect) => {
  505. var sU = (1.0 / (<PostProcess>this.brightPassPostProcess).width);
  506. var sV = (1.0 / (<PostProcess>this.brightPassPostProcess).height);
  507. brightOffsets[0] = -0.5 * sU;
  508. brightOffsets[1] = 0.5 * sV;
  509. brightOffsets[2] = 0.5 * sU;
  510. brightOffsets[3] = 0.5 * sV;
  511. brightOffsets[4] = -0.5 * sU;
  512. brightOffsets[5] = -0.5 * sV;
  513. brightOffsets[6] = 0.5 * sU;
  514. brightOffsets[7] = -0.5 * sV;
  515. effect.setArray2("dsOffsets", brightOffsets);
  516. effect.setFloat("brightThreshold", this.brightThreshold);
  517. }
  518. // Add to pipeline
  519. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBrightPass", () => { return this.brightPassPostProcess; }, true));
  520. }
  521. // Create blur H&V post-processes
  522. private _createBlurPostProcesses(scene: Scene, ratio: number, indice: number, blurWidthKey: string = "blurWidth"): void {
  523. var engine = scene.getEngine();
  524. var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
  525. var blurY = new BlurPostProcess("HDRBlurV" + "_" + indice, new Vector2(0, 1), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Engine.TEXTURETYPE_UNSIGNED_INT);
  526. blurX.onActivateObservable.add(() => {
  527. let dw = blurX.width / engine.getRenderWidth();
  528. blurX.kernel = (<any>this)[blurWidthKey] * dw;
  529. });
  530. blurY.onActivateObservable.add(() => {
  531. let dw = blurY.height / engine.getRenderHeight();
  532. blurY.kernel = this.horizontalBlur ? 64 * dw : (<any>this)[blurWidthKey] * dw;
  533. });
  534. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurH" + indice, () => { return blurX; }, true));
  535. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurV" + indice, () => { return blurY; }, true));
  536. this.blurHPostProcesses.push(blurX);
  537. this.blurVPostProcesses.push(blurY);
  538. }
  539. // Create texture adder post-process
  540. private _createTextureAdderPostProcess(scene: Scene, ratio: number): void {
  541. this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", Engine.TEXTURETYPE_UNSIGNED_INT);
  542. this.textureAdderPostProcess.onApply = (effect: Effect) => {
  543. effect.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? this._currentDepthOfFieldSource : this.originalPostProcess);
  544. effect.setTexture("lensSampler", this.lensTexture);
  545. effect.setFloat("exposure", this.exposure);
  546. this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess;
  547. };
  548. // Add to pipeline
  549. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true));
  550. }
        /**
         * Creates the volumetric light scattering chain: ray-march pass (at ratio/8),
         * smoothing blurs, and a merge pass that composes the rays over the scene color.
         * Requires the geometry buffer renderer (position texture) and a source light
         * with a shadow generator.
         */
        private _createVolumetricLightPostProcess(scene: Scene, ratio: number): void {
            var geometryRenderer = <GeometryBufferRenderer>scene.enableGeometryBufferRenderer();
            // The VLS shader samples world positions from the G-buffer
            geometryRenderer.enablePosition = true;
            var geometry = geometryRenderer.getGBuffer();
            // Base post-process (rendered at 1/8 of the requested ratio for performance)
            this.volumetricLightPostProcess = new PostProcess("HDRVLS", "standard",
                ["shadowViewProjection", "cameraPosition", "sunDirection", "sunColor", "scatteringCoefficient", "scatteringPower", "depthValues"],
                ["shadowMapSampler", "positionSampler"],
                ratio / 8,
                null,
                Texture.BILINEAR_SAMPLINGMODE,
                scene.getEngine(),
                false, "#define VLS\n#define NB_STEPS " + this._volumetricLightStepsCount.toFixed(1));
            // Reused across applies to avoid per-frame allocation
            var depthValues = Vector2.Zero();
            this.volumetricLightPostProcess.onApply = (effect: Effect) => {
                // Uniforms are only set when a shadow-casting source light and an active camera exist
                if (this.sourceLight && this.sourceLight.getShadowGenerator() && this._scene.activeCamera) {
                    var generator = this.sourceLight.getShadowGenerator()!;
                    effect.setTexture("shadowMapSampler", generator.getShadowMap());
                    // textures[2] is the position texture enabled above
                    effect.setTexture("positionSampler", geometry.textures[2]);
                    effect.setColor3("sunColor", this.sourceLight.diffuse);
                    effect.setVector3("sunDirection", this.sourceLight.getShadowDirection());
                    effect.setVector3("cameraPosition", this._scene.activeCamera.globalPosition);
                    effect.setMatrix("shadowViewProjection", generator.getTransformMatrix());
                    effect.setFloat("scatteringCoefficient", this.volumetricLightCoefficient);
                    effect.setFloat("scatteringPower", this.volumetricLightPower);
                    depthValues.x = this.sourceLight.getDepthMinZ(this._scene.activeCamera);
                    depthValues.y = this.sourceLight.getDepthMaxZ(this._scene.activeCamera);
                    effect.setVector2("depthValues", depthValues);
                }
            };
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLS", () => { return this.volumetricLightPostProcess; }, true));
            // Smooth
            this._createBlurPostProcesses(scene, ratio / 4, 0, "volumetricLightBlurScale");
            // Merge
            this.volumetricLightMergePostProces = new PostProcess("HDRVLSMerge", "standard", [], ["originalSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define VLSMERGE");
            this.volumetricLightMergePostProces.onApply = (effect: Effect) => {
                // Compose over the bloom result when bloom is on, otherwise over the raw scene color
                effect.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? this.textureAdderFinalPostProcess : this.originalPostProcess);
                this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess;
            };
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSMerge", () => { return this.volumetricLightMergePostProces; }, true));
        }
        // Create luminance
        /**
         * Creates the luminance measurement chain for eye adaptation: an initial
         * luminance pass of size 3^LuminanceSteps, then a cascade of 3x-down-sample
         * passes ending in a 1x1 target whose single pixel is read back into
         * "_hdrCurrentLuminance".
         */
        private _createLuminancePostProcesses(scene: Scene, textureType: number): void {
            // Create luminance
            var size = Math.pow(3, StandardRenderingPipeline.LuminanceSteps);
            this.luminancePostProcess = new PostProcess("HDRLuminance", "standard", ["lumOffsets"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LUMINANCE", textureType);
            var offsets: number[] = [];
            this.luminancePostProcess.onApply = (effect: Effect) => {
                // Half-texel corner offsets, recomputed each apply in case the target was resized
                var sU = (1.0 / (<PostProcess>this.luminancePostProcess).width);
                var sV = (1.0 / (<PostProcess>this.luminancePostProcess).height);
                offsets[0] = -0.5 * sU;
                offsets[1] = 0.5 * sV;
                offsets[2] = 0.5 * sU;
                offsets[3] = 0.5 * sV;
                offsets[4] = -0.5 * sU;
                offsets[5] = -0.5 * sV;
                offsets[6] = 0.5 * sU;
                offsets[7] = -0.5 * sV;
                effect.setArray2("lumOffsets", offsets);
            };
            // Add to pipeline
            this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminance", () => { return this.luminancePostProcess; }, true));
            // Create down sample luminance (sizes 3^(LuminanceSteps-1) ... 3^0 = 1x1)
            for (var i = StandardRenderingPipeline.LuminanceSteps - 1; i >= 0; i--) {
                var size = Math.pow(3, i);
                var defines = "#define LUMINANCE_DOWN_SAMPLE\n";
                if (i === 0) {
                    // Last pass packs the result for the CPU read-back below
                    defines += "#define FINAL_DOWN_SAMPLER";
                }
                var postProcess = new PostProcess("HDRLuminanceDownSample" + i, "standard", ["dsOffsets", "halfDestPixelSize"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines, textureType);
                this.luminanceDownSamplePostProcesses.push(postProcess);
            }
            // Create callbacks and add effects
            // "lastLuminance" always points at the previous pass in the cascade; it is
            // mutated inside each onApply and reset to the head when the last pass runs
            var lastLuminance: Nullable<PostProcess> = this.luminancePostProcess;
            this.luminanceDownSamplePostProcesses.forEach((pp, index) => {
                var downSampleOffsets = new Array<number>(18);
                pp.onApply = (effect: Effect) => {
                    if (!lastLuminance) {
                        return;
                    }
                    // 3x3 tap offsets expressed in the previous pass's texel space
                    var id = 0;
                    for (var x = -1; x < 2; x++) {
                        for (var y = -1; y < 2; y++) {
                            downSampleOffsets[id] = x / lastLuminance.width;
                            downSampleOffsets[id + 1] = y / lastLuminance.height;
                            id += 2;
                        }
                    }
                    effect.setArray2("dsOffsets", downSampleOffsets);
                    effect.setFloat("halfDestPixelSize", 0.5 / lastLuminance.width);
                    if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                        lastLuminance = this.luminancePostProcess;
                    } else {
                        lastLuminance = pp;
                    }
                };
                if (index === this.luminanceDownSamplePostProcesses.length - 1) {
                    pp.onAfterRender = (effect: Effect) => {
                        // Read the single 1x1 pixel back and decode the bit-shift-packed luminance
                        var pixel = scene.getEngine().readPixels(0, 0, 1, 1);
                        var bit_shift = new Vector4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);
                        this._hdrCurrentLuminance = (pixel[0] * bit_shift.x + pixel[1] * bit_shift.y + pixel[2] * bit_shift.z + pixel[3] * bit_shift.w) / 100.0;
                    };
                }
                this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminanceDownSample" + index, () => { return pp; }, true));
            });
        }
  657. // Create HDR post-process
  658. private _createHdrPostProcess(scene: Scene, ratio: number): void {
  659. this.hdrPostProcess = new PostProcess("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define HDR", Engine.TEXTURETYPE_UNSIGNED_INT);
  660. var outputLiminance = 1;
  661. var time = 0;
  662. var lastTime = 0;
  663. this.hdrPostProcess.onApply = (effect: Effect) => {
  664. effect.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource);
  665. time += scene.getEngine().getDeltaTime();
  666. if (outputLiminance < 0) {
  667. outputLiminance = this._hdrCurrentLuminance;
  668. } else {
  669. var dt = (lastTime - time) / 1000.0;
  670. if (this._hdrCurrentLuminance < outputLiminance + this.hdrDecreaseRate * dt) {
  671. outputLiminance += this.hdrDecreaseRate * dt;
  672. }
  673. else if (this._hdrCurrentLuminance > outputLiminance - this.hdrIncreaseRate * dt) {
  674. outputLiminance -= this.hdrIncreaseRate * dt;
  675. }
  676. else {
  677. outputLiminance = this._hdrCurrentLuminance;
  678. }
  679. }
  680. outputLiminance = Scalar.Clamp(outputLiminance, this.hdrMinimumLuminance, 1e20);
  681. effect.setFloat("averageLuminance", outputLiminance);
  682. lastTime = time;
  683. this._currentDepthOfFieldSource = this.hdrFinalPostProcess;
  684. };
  685. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDR", () => { return this.hdrPostProcess; }, true));
  686. }
    // Create lens flare post-processes: a half-resolution ghost/halo pass,
    // a blur of that result, and a full-resolution compose pass.
    private _createLensFlarePostProcess(scene: Scene, ratio: number): void {
        this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", Engine.TEXTURETYPE_UNSIGNED_INT);
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlare", () => { return this.lensFlarePostProcess; }, true));
        // Blur the flare output at quarter resolution (blur index 2).
        this._createBlurPostProcesses(scene, ratio / 4, 2);
        this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", Engine.TEXTURETYPE_UNSIGNED_INT);
        this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlareCompose", () => { return this.lensFlareComposePostProcess; }, true));
        // Reused every frame to avoid allocating a Vector2 per apply.
        var resolution = new Vector2(0, 0);
        // Lens flare
        this.lensFlarePostProcess.onApply = (effect: Effect) => {
            // Source is the first horizontal blur when bloom is on, otherwise
            // the original scene render.
            effect.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess);
            effect.setTexture("lensColorSampler", this.lensColorTexture);
            effect.setFloat("strength", this.lensFlareStrength);
            effect.setFloat("ghostDispersal", this.lensFlareGhostDispersal);
            effect.setFloat("haloWidth", this.lensFlareHaloWidth);
            // Shift
            resolution.x = (<PostProcess>this.lensFlarePostProcess).width;
            resolution.y = (<PostProcess>this.lensFlarePostProcess).height;
            effect.setVector2("resolution", resolution);
            effect.setFloat("distortionStrength", this.lensFlareDistortionStrength);
        };
        // Compose: scaleBias1 maps UV [0,1] -> [-1,1] and scaleBias2 maps back,
        // so the star-burst rotation below happens around the screen center.
        var scaleBias1 = Matrix.FromValues(
            2.0, 0.0, -1.0, 0.0,
            0.0, 2.0, -1.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        );
        var scaleBias2 = Matrix.FromValues(
            0.5, 0.0, 0.5, 0.0,
            0.0, 0.5, 0.5, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        );
        this.lensFlareComposePostProcess.onApply = (effect: Effect) => {
            if (!this._scene.activeCamera) {
                return;
            }
            effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
            effect.setTexture("lensDirtSampler", this.lensFlareDirtTexture);
            effect.setTexture("lensStarSampler", this.lensStarTexture);
            // Lens star rotation matrix: derive a rotation angle from the
            // camera's right (row 0) and forward (row 2) view-matrix rows so
            // the star pattern rotates as the camera turns.
            var camerax = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(0));
            var cameraz = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(2));
            var camRot = Vector3.Dot(camerax.toVector3(), new Vector3(1.0, 0.0, 0.0)) + Vector3.Dot(cameraz.toVector3(), new Vector3(0.0, 0.0, 1.0));
            camRot *= 4.0;
            var starRotation = Matrix.FromValues(
                Math.cos(camRot) * 0.5, -Math.sin(camRot), 0.0, 0.0,
                Math.sin(camRot), Math.cos(camRot) * 0.5, 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0
            );
            // Order matters: center, rotate, then un-center.
            var lensStarMatrix = scaleBias2.multiply(starRotation).multiply(scaleBias1);
            effect.setMatrix("lensStarMatrix", lensStarMatrix);
            this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess;
        };
    }
  744. // Create depth-of-field post-process
  745. private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void {
  746. this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Engine.TEXTURETYPE_UNSIGNED_INT);
  747. this.depthOfFieldPostProcess.onApply = (effect: Effect) => {
  748. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  749. effect.setTexture("depthSampler", this._getDepthTexture());
  750. effect.setFloat("distance", this.depthOfFieldDistance);
  751. };
  752. // Add to pipeline
  753. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfField", () => { return this.depthOfFieldPostProcess; }, true));
  754. }
  755. // Create motion blur post-process
  756. private _createMotionBlurPostProcess(scene: Scene, ratio: number): void {
  757. this.motionBlurPostProcess = new PostProcess("HDRMotionBlur", "standard",
  758. ["inverseViewProjection", "prevViewProjection", "screenSize", "motionScale", "motionStrength"],
  759. ["depthSampler"],
  760. ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + this.motionBlurSamples.toFixed(1), Engine.TEXTURETYPE_UNSIGNED_INT);
  761. var motionScale: number = 0;
  762. var prevViewProjection = Matrix.Identity();
  763. var invViewProjection = Matrix.Identity();
  764. var viewProjection = Matrix.Identity();
  765. var screenSize = Vector2.Zero();
  766. this.motionBlurPostProcess.onApply = (effect: Effect) => {
  767. viewProjection = scene.getProjectionMatrix().multiply(scene.getViewMatrix());
  768. viewProjection.invertToRef(invViewProjection);
  769. effect.setMatrix("inverseViewProjection", invViewProjection);
  770. effect.setMatrix("prevViewProjection", prevViewProjection);
  771. prevViewProjection = viewProjection;
  772. screenSize.x = (<PostProcess>this.motionBlurPostProcess).width;
  773. screenSize.y = (<PostProcess>this.motionBlurPostProcess).height;
  774. effect.setVector2("screenSize", screenSize);
  775. motionScale = scene.getEngine().getFps() / 60.0;
  776. effect.setFloat("motionScale", motionScale);
  777. effect.setFloat("motionStrength", this.motionStrength);
  778. effect.setTexture("depthSampler", this._getDepthTexture());
  779. };
  780. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRMotionBlur", () => { return this.motionBlurPostProcess; }, true));
  781. }
  782. private _getDepthTexture(): Texture {
  783. if (this._scene.getEngine().getCaps().drawBuffersExtension) {
  784. let renderer = <GeometryBufferRenderer>this._scene.enableGeometryBufferRenderer();
  785. return renderer.getGBuffer().textures[0];
  786. }
  787. return this._scene.enableDepthRenderer().getDepthMap();
  788. }
  789. private _disposePostProcesses(): void {
  790. for (var i = 0; i < this._cameras.length; i++) {
  791. var camera = this._cameras[i];
  792. if (this.originalPostProcess) { this.originalPostProcess.dispose(camera); }
  793. if (this.downSampleX4PostProcess) { this.downSampleX4PostProcess.dispose(camera); }
  794. if (this.brightPassPostProcess) { this.brightPassPostProcess.dispose(camera); }
  795. if (this.textureAdderPostProcess) { this.textureAdderPostProcess.dispose(camera); }
  796. if (this.textureAdderFinalPostProcess) { this.textureAdderFinalPostProcess.dispose(camera); }
  797. if (this.volumetricLightPostProcess) { this.volumetricLightPostProcess.dispose(camera); }
  798. if (this.volumetricLightSmoothXPostProcess) { this.volumetricLightSmoothXPostProcess.dispose(camera); }
  799. if (this.volumetricLightSmoothYPostProcess) { this.volumetricLightSmoothYPostProcess.dispose(camera); }
  800. if (this.volumetricLightMergePostProces) { this.volumetricLightMergePostProces.dispose(camera); }
  801. if (this.volumetricLightFinalPostProcess) { this.volumetricLightFinalPostProcess.dispose(camera); }
  802. if (this.lensFlarePostProcess) { this.lensFlarePostProcess.dispose(camera); }
  803. if (this.lensFlareComposePostProcess) { this.lensFlareComposePostProcess.dispose(camera); }
  804. for (var j = 0; j < this.luminanceDownSamplePostProcesses.length; j++) {
  805. this.luminanceDownSamplePostProcesses[j].dispose(camera);
  806. }
  807. if (this.luminancePostProcess) { this.luminancePostProcess.dispose(camera); }
  808. if (this.hdrPostProcess) { this.hdrPostProcess.dispose(camera); }
  809. if (this.hdrFinalPostProcess) { this.hdrFinalPostProcess.dispose(camera); }
  810. if (this.depthOfFieldPostProcess) { this.depthOfFieldPostProcess.dispose(camera); }
  811. if (this.motionBlurPostProcess) { this.motionBlurPostProcess.dispose(camera); }
  812. if (this.fxaaPostProcess) { this.fxaaPostProcess.dispose(camera); }
  813. for (var j = 0; j < this.blurHPostProcesses.length; j++) {
  814. this.blurHPostProcesses[j].dispose(camera);
  815. }
  816. for (var j = 0; j < this.blurVPostProcesses.length; j++) {
  817. this.blurVPostProcesses[j].dispose(camera);
  818. }
  819. }
  820. this.originalPostProcess = null;
  821. this.downSampleX4PostProcess = null;
  822. this.brightPassPostProcess = null;
  823. this.textureAdderPostProcess = null;
  824. this.textureAdderFinalPostProcess = null;
  825. this.volumetricLightPostProcess = null;
  826. this.volumetricLightSmoothXPostProcess = null;
  827. this.volumetricLightSmoothYPostProcess = null;
  828. this.volumetricLightMergePostProces = null;
  829. this.volumetricLightFinalPostProcess = null;
  830. this.lensFlarePostProcess = null;
  831. this.lensFlareComposePostProcess = null;
  832. this.luminancePostProcess = null;
  833. this.hdrPostProcess = null;
  834. this.hdrFinalPostProcess = null;
  835. this.depthOfFieldPostProcess = null;
  836. this.motionBlurPostProcess = null;
  837. this.fxaaPostProcess = null;
  838. this.luminanceDownSamplePostProcesses = [];
  839. this.blurHPostProcesses = [];
  840. this.blurVPostProcesses = [];
  841. }
    /**
     * Dispose of the pipeline and stop all post processes
     */
    public dispose(): void {
        // Release every post-process on every attached camera first.
        this._disposePostProcesses();
        // Detach the cameras before the base class tears down the pipeline.
        this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
        super.dispose();
    }
  850. /**
  851. * Serialize the rendering pipeline (Used when exporting)
  852. * @returns the serialized object
  853. */
  854. public serialize(): any {
  855. var serializationObject = SerializationHelper.Serialize(this);
  856. if (this.sourceLight) {
  857. serializationObject.sourceLightId = this.sourceLight.id;
  858. }
  859. serializationObject.customType = "StandardRenderingPipeline";
  860. return serializationObject;
  861. }
  862. /**
  863. * Parse the serialized pipeline
  864. * @param source Source pipeline.
  865. * @param scene The scene to load the pipeline to.
  866. * @param rootUrl The URL of the serialized pipeline.
  867. * @returns An instantiated pipeline from the serialized object.
  868. */
  869. public static Parse(source: any, scene: Scene, rootUrl: string): StandardRenderingPipeline {
  870. var p = SerializationHelper.Parse(() => new StandardRenderingPipeline(source._name, scene, source._ratio), source, scene, rootUrl);
  871. if (source.sourceLightId) {
  872. p.sourceLight = <SpotLight | DirectionalLight> scene.getLightByID(source.sourceLightId);
  873. }
  874. return p;
  875. }
    // Number of down-sampling steps in the luminance-estimation chain; the
    // initial luminance target is 3^LuminanceSteps pixels on each side.
    public static LuminanceSteps: number = 6;
  878. }
  879. }