// standardRenderingPipeline.ts
import { Nullable } from "../../../types";
import { serialize, serializeAsTexture, SerializationHelper } from "../../../Misc/decorators";
import { IAnimatable } from '../../../Animations/animatable.interface';
import { Logger } from "../../../Misc/logger";
import { Vector2, Vector3, Matrix, Vector4 } from "../../../Maths/math.vector";
import { Scalar } from "../../../Maths/math.scalar";
import { Camera } from "../../../Cameras/camera";
import { Effect } from "../../../Materials/effect";
import { Texture } from "../../../Materials/Textures/texture";
import { PostProcess } from "../../../PostProcesses/postProcess";
import { PostProcessRenderPipeline } from "../../../PostProcesses/RenderPipeline/postProcessRenderPipeline";
import { PostProcessRenderEffect } from "../../../PostProcesses/RenderPipeline/postProcessRenderEffect";
import { BlurPostProcess } from "../../../PostProcesses/blurPostProcess";
import { FxaaPostProcess } from "../../../PostProcesses/fxaaPostProcess";
import { IDisposable, Scene } from "../../../scene";
import { SpotLight } from "../../../Lights/spotLight";
import { DirectionalLight } from "../../../Lights/directionalLight";
import { GeometryBufferRenderer } from "../../../Rendering/geometryBufferRenderer";
import { Constants } from "../../../Engines/constants";
import { _TypeStore } from '../../../Misc/typeStore';
import { MotionBlurPostProcess } from "../../motionBlurPostProcess";
import { ScreenSpaceReflectionPostProcess } from "../../screenSpaceReflectionPostProcess";
declare type Animation = import("../../../Animations/animation").Animation;
import "../../../PostProcesses/RenderPipeline/postProcessRenderPipelineManagerSceneComponent";
import "../../../Shaders/standard.fragment";
/**
 * Standard rendering pipeline
 * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
 * @see https://doc.babylonjs.com/how_to/using_standard_rendering_pipeline
 */
  32. export class StandardRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
  33. /**
  34. * Public members
  35. */
  36. // Post-processes
  37. /**
  38. * Post-process which contains the original scene color before the pipeline applies all the effects
  39. */
  40. public originalPostProcess: Nullable<PostProcess>;
  41. /**
  42. * Post-process used to down scale an image x4
  43. */
  44. public downSampleX4PostProcess: Nullable<PostProcess> = null;
  45. /**
  46. * Post-process used to calculate the illuminated surfaces controlled by a threshold
  47. */
  48. public brightPassPostProcess: Nullable<PostProcess> = null;
  49. /**
  50. * Post-process array storing all the horizontal blur post-processes used by the pipeline
  51. */
  52. public blurHPostProcesses: PostProcess[] = [];
  53. /**
  54. * Post-process array storing all the vertical blur post-processes used by the pipeline
  55. */
  56. public blurVPostProcesses: PostProcess[] = [];
  57. /**
  58. * Post-process used to add colors of 2 textures (typically brightness + real scene color)
  59. */
  60. public textureAdderPostProcess: Nullable<PostProcess> = null;
  61. /**
  62. * Post-process used to create volumetric lighting effect
  63. */
  64. public volumetricLightPostProcess: Nullable<PostProcess> = null;
  65. /**
  66. * Post-process used to smooth the previous volumetric light post-process on the X axis
  67. */
  68. public volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess> = null;
  69. /**
  70. * Post-process used to smooth the previous volumetric light post-process on the Y axis
  71. */
  72. public volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess> = null;
  73. /**
  74. * Post-process used to merge the volumetric light effect and the real scene color
  75. */
  76. public volumetricLightMergePostProces: Nullable<PostProcess> = null;
  77. /**
  78. * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose)
  79. */
  80. public volumetricLightFinalPostProcess: Nullable<PostProcess> = null;
  81. /**
  82. * Base post-process used to calculate the average luminance of the final image for HDR
  83. */
  84. public luminancePostProcess: Nullable<PostProcess> = null;
  85. /**
  86. * Post-processes used to create down sample post-processes in order to get
  87. * the average luminance of the final image for HDR
  88. * Array of length "StandardRenderingPipeline.LuminanceSteps"
  89. */
  90. public luminanceDownSamplePostProcesses: PostProcess[] = [];
  91. /**
  92. * Post-process used to create a HDR effect (light adaptation)
  93. */
  94. public hdrPostProcess: Nullable<PostProcess> = null;
  95. /**
  96. * Post-process used to store the final texture adder post-process (attach/detach for debug purpose)
  97. */
  98. public textureAdderFinalPostProcess: Nullable<PostProcess> = null;
  99. /**
  100. * Post-process used to store the final lens flare post-process (attach/detach for debug purpose)
  101. */
  102. public lensFlareFinalPostProcess: Nullable<PostProcess> = null;
  103. /**
  104. * Post-process used to merge the final HDR post-process and the real scene color
  105. */
  106. public hdrFinalPostProcess: Nullable<PostProcess> = null;
  107. /**
  108. * Post-process used to create a lens flare effect
  109. */
  110. public lensFlarePostProcess: Nullable<PostProcess> = null;
  111. /**
  112. * Post-process that merges the result of the lens flare post-process and the real scene color
  113. */
  114. public lensFlareComposePostProcess: Nullable<PostProcess> = null;
  115. /**
  116. * Post-process used to create a motion blur effect
  117. */
  118. public motionBlurPostProcess: Nullable<PostProcess> = null;
  119. /**
  120. * Post-process used to create a depth of field effect
  121. */
  122. public depthOfFieldPostProcess: Nullable<PostProcess> = null;
  123. /**
  124. * The Fast Approximate Anti-Aliasing post process which attemps to remove aliasing from an image.
  125. */
  126. public fxaaPostProcess: Nullable<FxaaPostProcess> = null;
  127. /**
  128. * Post-process used to simulate realtime reflections using the screen space and geometry renderer.
  129. */
  130. public screenSpaceReflectionPostProcess: Nullable<ScreenSpaceReflectionPostProcess> = null;
  131. // Values
  132. /**
  133. * Represents the brightness threshold in order to configure the illuminated surfaces
  134. */
  135. @serialize()
  136. public brightThreshold: number = 1.0;
  137. /**
  138. * Configures the blur intensity used for surexposed surfaces are highlighted surfaces (light halo)
  139. */
  140. @serialize()
  141. public blurWidth: number = 512.0;
  142. /**
  143. * Sets if the blur for highlighted surfaces must be only horizontal
  144. */
  145. @serialize()
  146. public horizontalBlur: boolean = false;
  147. /**
  148. * Gets the overall exposure used by the pipeline
  149. */
  150. @serialize()
  151. public get exposure(): number {
  152. return this._fixedExposure;
  153. }
  154. /**
  155. * Sets the overall exposure used by the pipeline
  156. */
  157. public set exposure(value: number) {
  158. this._fixedExposure = value;
  159. this._currentExposure = value;
  160. }
  161. /**
  162. * Texture used typically to simulate "dirty" on camera lens
  163. */
  164. @serializeAsTexture("lensTexture")
  165. public lensTexture: Nullable<Texture> = null;
  166. /**
  167. * Represents the offset coefficient based on Rayleigh principle. Typically in interval [-0.2, 0.2]
  168. */
  169. @serialize()
  170. public volumetricLightCoefficient: number = 0.2;
  171. /**
  172. * The overall power of volumetric lights, typically in interval [0, 10] maximum
  173. */
  174. @serialize()
  175. public volumetricLightPower: number = 4.0;
  176. /**
  177. * Used the set the blur intensity to smooth the volumetric lights
  178. */
  179. @serialize()
  180. public volumetricLightBlurScale: number = 64.0;
  181. /**
  182. * Light (spot or directional) used to generate the volumetric lights rays
  183. * The source light must have a shadow generate so the pipeline can get its
  184. * depth map
  185. */
  186. public sourceLight: Nullable<SpotLight | DirectionalLight> = null;
  187. /**
  188. * For eye adaptation, represents the minimum luminance the eye can see
  189. */
  190. @serialize()
  191. public hdrMinimumLuminance: number = 1.0;
  192. /**
  193. * For eye adaptation, represents the decrease luminance speed
  194. */
  195. @serialize()
  196. public hdrDecreaseRate: number = 0.5;
  197. /**
  198. * For eye adaptation, represents the increase luminance speed
  199. */
  200. @serialize()
  201. public hdrIncreaseRate: number = 0.5;
  202. /**
  203. * Gets wether or not the exposure of the overall pipeline should be automatically adjusted by the HDR post-process
  204. */
  205. @serialize()
  206. public get hdrAutoExposure(): boolean {
  207. return this._hdrAutoExposure;
  208. }
  209. /**
  210. * Sets wether or not the exposure of the overall pipeline should be automatically adjusted by the HDR post-process
  211. */
  212. public set hdrAutoExposure(value: boolean) {
  213. this._hdrAutoExposure = value;
  214. if (this.hdrPostProcess) {
  215. const defines = ["#define HDR"];
  216. if (value) {
  217. defines.push("#define AUTO_EXPOSURE");
  218. }
  219. this.hdrPostProcess.updateEffect(defines.join("\n"));
  220. }
  221. }
  222. /**
  223. * Lens color texture used by the lens flare effect. Mandatory if lens flare effect enabled
  224. */
  225. @serializeAsTexture("lensColorTexture")
  226. public lensColorTexture: Nullable<Texture> = null;
  227. /**
  228. * The overall strengh for the lens flare effect
  229. */
  230. @serialize()
  231. public lensFlareStrength: number = 20.0;
  232. /**
  233. * Dispersion coefficient for lens flare ghosts
  234. */
  235. @serialize()
  236. public lensFlareGhostDispersal: number = 1.4;
  237. /**
  238. * Main lens flare halo width
  239. */
  240. @serialize()
  241. public lensFlareHaloWidth: number = 0.7;
  242. /**
  243. * Based on the lens distortion effect, defines how much the lens flare result
  244. * is distorted
  245. */
  246. @serialize()
  247. public lensFlareDistortionStrength: number = 16.0;
  248. /**
  249. * Configures the blur intensity used for for lens flare (halo)
  250. */
  251. @serialize()
  252. public lensFlareBlurWidth: number = 512.0;
  253. /**
  254. * Lens star texture must be used to simulate rays on the flares and is available
  255. * in the documentation
  256. */
  257. @serializeAsTexture("lensStarTexture")
  258. public lensStarTexture: Nullable<Texture> = null;
  259. /**
  260. * As the "lensTexture" (can be the same texture or different), it is used to apply the lens
  261. * flare effect by taking account of the dirt texture
  262. */
  263. @serializeAsTexture("lensFlareDirtTexture")
  264. public lensFlareDirtTexture: Nullable<Texture> = null;
  265. /**
  266. * Represents the focal length for the depth of field effect
  267. */
  268. @serialize()
  269. public depthOfFieldDistance: number = 10.0;
  270. /**
  271. * Represents the blur intensity for the blurred part of the depth of field effect
  272. */
  273. @serialize()
  274. public depthOfFieldBlurWidth: number = 64.0;
  275. /**
  276. * Gets how much the image is blurred by the movement while using the motion blur post-process
  277. */
  278. @serialize()
  279. public get motionStrength(): number {
  280. return this._motionStrength;
  281. }
  282. /**
  283. * Sets how much the image is blurred by the movement while using the motion blur post-process
  284. */
  285. public set motionStrength(strength: number) {
  286. this._motionStrength = strength;
  287. if (this._isObjectBasedMotionBlur && this.motionBlurPostProcess) {
  288. (this.motionBlurPostProcess as MotionBlurPostProcess).motionStrength = strength;
  289. }
  290. }
  291. /**
  292. * Gets wether or not the motion blur post-process is object based or screen based.
  293. */
  294. @serialize()
  295. public get objectBasedMotionBlur(): boolean {
  296. return this._isObjectBasedMotionBlur;
  297. }
  298. /**
  299. * Sets wether or not the motion blur post-process should be object based or screen based
  300. */
  301. public set objectBasedMotionBlur(value: boolean) {
  302. const shouldRebuild = this._isObjectBasedMotionBlur !== value;
  303. this._isObjectBasedMotionBlur = value;
  304. if (shouldRebuild) {
  305. this._buildPipeline();
  306. }
  307. }
  308. /**
  309. * List of animations for the pipeline (IAnimatable implementation)
  310. */
  311. public animations: Animation[] = [];
  312. /**
  313. * Private members
  314. */
  315. private _scene: Scene;
  316. private _currentDepthOfFieldSource: Nullable<PostProcess> = null;
  317. private _basePostProcess: Nullable<PostProcess>;
  318. private _fixedExposure: number = 1.0;
  319. private _currentExposure: number = 1.0;
  320. private _hdrAutoExposure: boolean = false;
  321. private _hdrCurrentLuminance: number = 1.0;
  322. private _motionStrength: number = 1.0;
  323. private _isObjectBasedMotionBlur: boolean = false;
  324. private _floatTextureType: number;
  325. private _camerasToBeAttached: Array<Camera> = [];
  326. @serialize()
  327. private _ratio: number;
  328. // Getters and setters
  329. private _bloomEnabled: boolean = false;
  330. private _depthOfFieldEnabled: boolean = false;
  331. private _vlsEnabled: boolean = false;
  332. private _lensFlareEnabled: boolean = false;
  333. private _hdrEnabled: boolean = false;
  334. private _motionBlurEnabled: boolean = false;
  335. private _fxaaEnabled: boolean = false;
  336. private _screenSpaceReflectionsEnabled: boolean = false;
  337. private _motionBlurSamples: number = 64.0;
  338. private _volumetricLightStepsCount: number = 50.0;
  339. private _samples: number = 1;
  340. /**
  341. * @ignore
  342. * Specifies if the bloom pipeline is enabled
  343. */
  344. @serialize()
  345. public get BloomEnabled(): boolean {
  346. return this._bloomEnabled;
  347. }
  348. public set BloomEnabled(enabled: boolean) {
  349. if (this._bloomEnabled === enabled) {
  350. return;
  351. }
  352. this._bloomEnabled = enabled;
  353. this._buildPipeline();
  354. }
  355. /**
  356. * @ignore
  357. * Specifies if the depth of field pipeline is enabed
  358. */
  359. @serialize()
  360. public get DepthOfFieldEnabled(): boolean {
  361. return this._depthOfFieldEnabled;
  362. }
  363. public set DepthOfFieldEnabled(enabled: boolean) {
  364. if (this._depthOfFieldEnabled === enabled) {
  365. return;
  366. }
  367. this._depthOfFieldEnabled = enabled;
  368. this._buildPipeline();
  369. }
  370. /**
  371. * @ignore
  372. * Specifies if the lens flare pipeline is enabed
  373. */
  374. @serialize()
  375. public get LensFlareEnabled(): boolean {
  376. return this._lensFlareEnabled;
  377. }
  378. public set LensFlareEnabled(enabled: boolean) {
  379. if (this._lensFlareEnabled === enabled) {
  380. return;
  381. }
  382. this._lensFlareEnabled = enabled;
  383. this._buildPipeline();
  384. }
  385. /**
  386. * @ignore
  387. * Specifies if the HDR pipeline is enabled
  388. */
  389. @serialize()
  390. public get HDREnabled(): boolean {
  391. return this._hdrEnabled;
  392. }
  393. public set HDREnabled(enabled: boolean) {
  394. if (this._hdrEnabled === enabled) {
  395. return;
  396. }
  397. this._hdrEnabled = enabled;
  398. this._buildPipeline();
  399. }
  400. /**
  401. * @ignore
  402. * Specifies if the volumetric lights scattering effect is enabled
  403. */
  404. @serialize()
  405. public get VLSEnabled(): boolean {
  406. return this._vlsEnabled;
  407. }
  408. public set VLSEnabled(enabled) {
  409. if (this._vlsEnabled === enabled) {
  410. return;
  411. }
  412. if (enabled) {
  413. var geometry = this._scene.enableGeometryBufferRenderer();
  414. if (!geometry) {
  415. Logger.Warn("Geometry renderer is not supported, cannot create volumetric lights in Standard Rendering Pipeline");
  416. return;
  417. }
  418. }
  419. this._vlsEnabled = enabled;
  420. this._buildPipeline();
  421. }
  422. /**
  423. * @ignore
  424. * Specifies if the motion blur effect is enabled
  425. */
  426. @serialize()
  427. public get MotionBlurEnabled(): boolean {
  428. return this._motionBlurEnabled;
  429. }
  430. public set MotionBlurEnabled(enabled: boolean) {
  431. if (this._motionBlurEnabled === enabled) {
  432. return;
  433. }
  434. this._motionBlurEnabled = enabled;
  435. this._buildPipeline();
  436. }
  437. /**
  438. * Specifies if anti-aliasing is enabled
  439. */
  440. @serialize()
  441. public get fxaaEnabled(): boolean {
  442. return this._fxaaEnabled;
  443. }
  444. public set fxaaEnabled(enabled: boolean) {
  445. if (this._fxaaEnabled === enabled) {
  446. return;
  447. }
  448. this._fxaaEnabled = enabled;
  449. this._buildPipeline();
  450. }
  451. /**
  452. * Specifies if screen space reflections are enabled.
  453. */
  454. @serialize()
  455. public get screenSpaceReflectionsEnabled(): boolean {
  456. return this._screenSpaceReflectionsEnabled;
  457. }
  458. public set screenSpaceReflectionsEnabled(enabled: boolean) {
  459. if (this._screenSpaceReflectionsEnabled === enabled) {
  460. return;
  461. }
  462. this._screenSpaceReflectionsEnabled = enabled;
  463. this._buildPipeline();
  464. }
  465. /**
  466. * Specifies the number of steps used to calculate the volumetric lights
  467. * Typically in interval [50, 200]
  468. */
  469. @serialize()
  470. public get volumetricLightStepsCount(): number {
  471. return this._volumetricLightStepsCount;
  472. }
  473. public set volumetricLightStepsCount(count: number) {
  474. if (this.volumetricLightPostProcess) {
  475. this.volumetricLightPostProcess.updateEffect("#define VLS\n#define NB_STEPS " + count.toFixed(1));
  476. }
  477. this._volumetricLightStepsCount = count;
  478. }
  479. /**
  480. * Specifies the number of samples used for the motion blur effect
  481. * Typically in interval [16, 64]
  482. */
  483. @serialize()
  484. public get motionBlurSamples(): number {
  485. return this._motionBlurSamples;
  486. }
  487. public set motionBlurSamples(samples: number) {
  488. if (this.motionBlurPostProcess) {
  489. if (this._isObjectBasedMotionBlur) {
  490. (this.motionBlurPostProcess as MotionBlurPostProcess).motionBlurSamples = samples;
  491. } else {
  492. this.motionBlurPostProcess.updateEffect("#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + samples.toFixed(1));
  493. }
  494. }
  495. this._motionBlurSamples = samples;
  496. }
  497. /**
  498. * Specifies MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
  499. */
  500. @serialize()
  501. public get samples(): number {
  502. return this._samples;
  503. }
  504. public set samples(sampleCount: number) {
  505. if (this._samples === sampleCount) {
  506. return;
  507. }
  508. this._samples = sampleCount;
  509. this._buildPipeline();
  510. }
  511. /**
  512. * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
  513. * @constructor
  514. * @param name The rendering pipeline name
  515. * @param scene The scene linked to this pipeline
  516. * @param ratio The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5)
  517. * @param originalPostProcess the custom original color post-process. Must be "reusable". Can be null.
  518. * @param cameras The array of cameras that the rendering pipeline will be attached to
  519. */
  520. constructor(name: string, scene: Scene, ratio: number, originalPostProcess: Nullable<PostProcess> = null, cameras?: Camera[]) {
  521. super(scene.getEngine(), name);
  522. this._cameras = cameras || scene.cameras;
  523. this._cameras = this._cameras.slice();
  524. this._camerasToBeAttached = this._cameras.slice();
  525. // Initialize
  526. this._scene = scene;
  527. this._basePostProcess = originalPostProcess;
  528. this._ratio = ratio;
  529. // Misc
  530. this._floatTextureType = scene.getEngine().getCaps().textureFloatRender ? Constants.TEXTURETYPE_FLOAT : Constants.TEXTURETYPE_HALF_FLOAT;
  531. // Finish
  532. scene.postProcessRenderPipelineManager.addPipeline(this);
  533. this._buildPipeline();
  534. }
  535. private _buildPipeline(): void {
  536. var ratio = this._ratio;
  537. var scene = this._scene;
  538. this._disposePostProcesses();
  539. if (this._cameras !== null) {
  540. this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
  541. // get back cameras to be used to reattach pipeline
  542. this._cameras = this._camerasToBeAttached.slice();
  543. }
  544. this._reset();
  545. // Create pass post-process
  546. if (this._screenSpaceReflectionsEnabled) {
  547. this.screenSpaceReflectionPostProcess = new ScreenSpaceReflectionPostProcess("HDRPass", scene, ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType);
  548. this.screenSpaceReflectionPostProcess.onApplyObservable.add(() => {
  549. this._currentDepthOfFieldSource = this.screenSpaceReflectionPostProcess;
  550. });
  551. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRScreenSpaceReflections", () => this.screenSpaceReflectionPostProcess, true));
  552. }
  553. if (!this._basePostProcess) {
  554. this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType);
  555. }
  556. else {
  557. this.originalPostProcess = this._basePostProcess;
  558. }
  559. this.originalPostProcess.autoClear = !this.screenSpaceReflectionPostProcess;
  560. this.originalPostProcess.onApplyObservable.add(() => {
  561. this._currentDepthOfFieldSource = this.originalPostProcess;
  562. });
  563. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => this.originalPostProcess, true));
  564. if (this._bloomEnabled) {
  565. // Create down sample X4 post-process
  566. this._createDownSampleX4PostProcess(scene, ratio / 4);
  567. // Create bright pass post-process
  568. this._createBrightPassPostProcess(scene, ratio / 4);
  569. // Create gaussian blur post-processes (down sampling blurs)
  570. this._createBlurPostProcesses(scene, ratio / 4, 1);
  571. // Create texture adder post-process
  572. this._createTextureAdderPostProcess(scene, ratio);
  573. // Create depth-of-field source post-process
  574. this.textureAdderFinalPostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  575. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBaseDepthOfFieldSource", () => { return this.textureAdderFinalPostProcess; }, true));
  576. }
  577. if (this._vlsEnabled) {
  578. // Create volumetric light
  579. this._createVolumetricLightPostProcess(scene, ratio);
  580. // Create volumetric light final post-process
  581. this.volumetricLightFinalPostProcess = new PostProcess("HDRVLSFinal", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  582. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSFinal", () => { return this.volumetricLightFinalPostProcess; }, true));
  583. }
  584. if (this._lensFlareEnabled) {
  585. // Create lens flare post-process
  586. this._createLensFlarePostProcess(scene, ratio);
  587. // Create depth-of-field source post-process post lens-flare and disable it now
  588. this.lensFlareFinalPostProcess = new PostProcess("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  589. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => { return this.lensFlareFinalPostProcess; }, true));
  590. }
  591. if (this._hdrEnabled) {
  592. // Create luminance
  593. this._createLuminancePostProcesses(scene, this._floatTextureType);
  594. // Create HDR
  595. this._createHdrPostProcess(scene, ratio);
  596. // Create depth-of-field source post-process post hdr and disable it now
  597. this.hdrFinalPostProcess = new PostProcess("HDRPostHDReDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT);
  598. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostHDReDepthOfFieldSource", () => { return this.hdrFinalPostProcess; }, true));
  599. }
  600. if (this._depthOfFieldEnabled) {
  601. // Create gaussian blur used by depth-of-field
  602. this._createBlurPostProcesses(scene, ratio / 2, 3, "depthOfFieldBlurWidth");
  603. // Create depth-of-field post-process
  604. this._createDepthOfFieldPostProcess(scene, ratio);
  605. }
  606. if (this._motionBlurEnabled) {
  607. // Create motion blur post-process
  608. this._createMotionBlurPostProcess(scene, ratio);
  609. }
  610. if (this._fxaaEnabled) {
  611. // Create fxaa post-process
  612. this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
  613. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true));
  614. }
  615. if (this._cameras !== null) {
  616. this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
  617. }
  618. if (!this._enableMSAAOnFirstPostProcess(this._samples) && this._samples > 1) {
  619. Logger.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
  620. }
  621. }
  622. // Down Sample X4 Post-Processs
  623. private _createDownSampleX4PostProcess(scene: Scene, ratio: number): void {
  624. var downSampleX4Offsets = new Array<number>(32);
  625. this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", this._floatTextureType);
  626. this.downSampleX4PostProcess.onApply = (effect: Effect) => {
  627. var id = 0;
  628. let width = (<PostProcess>this.downSampleX4PostProcess).width;
  629. let height = (<PostProcess>this.downSampleX4PostProcess).height;
  630. for (var i = -2; i < 2; i++) {
  631. for (var j = -2; j < 2; j++) {
  632. downSampleX4Offsets[id] = (i + 0.5) * (1.0 / width);
  633. downSampleX4Offsets[id + 1] = (j + 0.5) * (1.0 / height);
  634. id += 2;
  635. }
  636. }
  637. effect.setArray2("dsOffsets", downSampleX4Offsets);
  638. };
  639. // Add to pipeline
  640. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDownSampleX4", () => { return this.downSampleX4PostProcess; }, true));
  641. }
  642. // Brightpass Post-Process
  643. private _createBrightPassPostProcess(scene: Scene, ratio: number): void {
  644. var brightOffsets = new Array<number>(8);
  645. this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", this._floatTextureType);
  646. this.brightPassPostProcess.onApply = (effect: Effect) => {
  647. var sU = (1.0 / (<PostProcess>this.brightPassPostProcess).width);
  648. var sV = (1.0 / (<PostProcess>this.brightPassPostProcess).height);
  649. brightOffsets[0] = -0.5 * sU;
  650. brightOffsets[1] = 0.5 * sV;
  651. brightOffsets[2] = 0.5 * sU;
  652. brightOffsets[3] = 0.5 * sV;
  653. brightOffsets[4] = -0.5 * sU;
  654. brightOffsets[5] = -0.5 * sV;
  655. brightOffsets[6] = 0.5 * sU;
  656. brightOffsets[7] = -0.5 * sV;
  657. effect.setArray2("dsOffsets", brightOffsets);
  658. effect.setFloat("brightThreshold", this.brightThreshold);
  659. };
  660. // Add to pipeline
  661. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBrightPass", () => { return this.brightPassPostProcess; }, true));
  662. }
  663. // Create blur H&V post-processes
  664. private _createBlurPostProcesses(scene: Scene, ratio: number, indice: number, blurWidthKey: string = "blurWidth"): void {
  665. var engine = scene.getEngine();
  666. var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType);
  667. var blurY = new BlurPostProcess("HDRBlurV" + "_" + indice, new Vector2(0, 1), (<any>this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType);
  668. blurX.onActivateObservable.add(() => {
  669. let dw = blurX.width / engine.getRenderWidth();
  670. blurX.kernel = (<any>this)[blurWidthKey] * dw;
  671. });
  672. blurY.onActivateObservable.add(() => {
  673. let dw = blurY.height / engine.getRenderHeight();
  674. blurY.kernel = this.horizontalBlur ? 64 * dw : (<any>this)[blurWidthKey] * dw;
  675. });
  676. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurH" + indice, () => { return blurX; }, true));
  677. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBlurV" + indice, () => { return blurY; }, true));
  678. this.blurHPostProcesses.push(blurX);
  679. this.blurVPostProcesses.push(blurY);
  680. }
  681. // Create texture adder post-process
  682. private _createTextureAdderPostProcess(scene: Scene, ratio: number): void {
  683. this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", this._floatTextureType);
  684. this.textureAdderPostProcess.onApply = (effect: Effect) => {
  685. effect.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? this._currentDepthOfFieldSource : this.originalPostProcess);
  686. effect.setTexture("lensSampler", this.lensTexture);
  687. effect.setFloat("exposure", this._currentExposure);
  688. this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess;
  689. };
  690. // Add to pipeline
  691. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true));
  692. }
// Create volumetric light scattering post-processes (ray-march + blur + merge).
private _createVolumetricLightPostProcess(scene: Scene, ratio: number): void {
    // The geometry buffer provides the position texture sampled below.
    var geometryRenderer = <GeometryBufferRenderer>scene.enableGeometryBufferRenderer();
    geometryRenderer.enablePosition = true;
    var geometry = geometryRenderer.getGBuffer();
    // Base post-process; rendered at 1/8 resolution since ray-marching is expensive.
    // NB_STEPS is baked into the shader defines, so changing the step count
    // requires rebuilding this post-process.
    this.volumetricLightPostProcess = new PostProcess("HDRVLS", "standard",
        ["shadowViewProjection", "cameraPosition", "sunDirection", "sunColor", "scatteringCoefficient", "scatteringPower", "depthValues"],
        ["shadowMapSampler", "positionSampler"],
        ratio / 8,
        null,
        Texture.BILINEAR_SAMPLINGMODE,
        scene.getEngine(),
        false, "#define VLS\n#define NB_STEPS " + this._volumetricLightStepsCount.toFixed(1));
    // Reused every frame to avoid per-frame allocations.
    var depthValues = Vector2.Zero();
    this.volumetricLightPostProcess.onApply = (effect: Effect) => {
        // Uniforms are only fed when a shadow-casting source light and an active camera exist.
        if (this.sourceLight && this.sourceLight.getShadowGenerator() && this._scene.activeCamera) {
            var generator = this.sourceLight.getShadowGenerator()!;
            effect.setTexture("shadowMapSampler", generator.getShadowMap());
            // textures[2]: presumably the position texture enabled above via
            // enablePosition — verify index against GeometryBufferRenderer's layout.
            effect.setTexture("positionSampler", geometry.textures[2]);
            effect.setColor3("sunColor", this.sourceLight.diffuse);
            effect.setVector3("sunDirection", this.sourceLight.getShadowDirection());
            effect.setVector3("cameraPosition", this._scene.activeCamera.globalPosition);
            effect.setMatrix("shadowViewProjection", generator.getTransformMatrix());
            effect.setFloat("scatteringCoefficient", this.volumetricLightCoefficient);
            effect.setFloat("scatteringPower", this.volumetricLightPower);
            // Light's depth range for the active camera, passed to the shader as a pair.
            depthValues.x = this.sourceLight.getDepthMinZ(this._scene.activeCamera);
            depthValues.y = this.sourceLight.getDepthMaxZ(this._scene.activeCamera);
            effect.setVector2("depthValues", depthValues);
        }
    };
    this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLS", () => { return this.volumetricLightPostProcess; }, true));
    // Smooth the scattering result with the shared blur passes (1/4 resolution).
    this._createBlurPostProcesses(scene, ratio / 4, 0, "volumetricLightBlurScale");
    // Merge the smoothed scattering over the current pipeline output.
    this.volumetricLightMergePostProces = new PostProcess("HDRVLSMerge", "standard", [], ["originalSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define VLSMERGE");
    this.volumetricLightMergePostProces.onApply = (effect: Effect) => {
        // Merge over the bloom result when bloom is enabled, otherwise over the original render.
        effect.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? this.textureAdderFinalPostProcess : this.originalPostProcess);
        // Downstream passes read from the merged result.
        this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess;
    };
    this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSMerge", () => { return this.volumetricLightMergePostProces; }, true));
}
  734. // Create luminance
  735. private _createLuminancePostProcesses(scene: Scene, textureType: number): void {
  736. // Create luminance
  737. var size = Math.pow(3, StandardRenderingPipeline.LuminanceSteps);
  738. this.luminancePostProcess = new PostProcess("HDRLuminance", "standard", ["lumOffsets"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LUMINANCE", textureType);
  739. var offsets: number[] = [];
  740. this.luminancePostProcess.onApply = (effect: Effect) => {
  741. var sU = (1.0 / (<PostProcess>this.luminancePostProcess).width);
  742. var sV = (1.0 / (<PostProcess>this.luminancePostProcess).height);
  743. offsets[0] = -0.5 * sU;
  744. offsets[1] = 0.5 * sV;
  745. offsets[2] = 0.5 * sU;
  746. offsets[3] = 0.5 * sV;
  747. offsets[4] = -0.5 * sU;
  748. offsets[5] = -0.5 * sV;
  749. offsets[6] = 0.5 * sU;
  750. offsets[7] = -0.5 * sV;
  751. effect.setArray2("lumOffsets", offsets);
  752. };
  753. // Add to pipeline
  754. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminance", () => { return this.luminancePostProcess; }, true));
  755. // Create down sample luminance
  756. for (var i = StandardRenderingPipeline.LuminanceSteps - 1; i >= 0; i--) {
  757. var size = Math.pow(3, i);
  758. var defines = "#define LUMINANCE_DOWN_SAMPLE\n";
  759. if (i === 0) {
  760. defines += "#define FINAL_DOWN_SAMPLER";
  761. }
  762. var postProcess = new PostProcess("HDRLuminanceDownSample" + i, "standard", ["dsOffsets", "halfDestPixelSize"], [], { width: size, height: size }, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines, textureType);
  763. this.luminanceDownSamplePostProcesses.push(postProcess);
  764. }
  765. // Create callbacks and add effects
  766. var lastLuminance: Nullable<PostProcess> = this.luminancePostProcess;
  767. this.luminanceDownSamplePostProcesses.forEach((pp, index) => {
  768. var downSampleOffsets = new Array<number>(18);
  769. pp.onApply = (effect: Effect) => {
  770. if (!lastLuminance) {
  771. return;
  772. }
  773. var id = 0;
  774. for (var x = -1; x < 2; x++) {
  775. for (var y = -1; y < 2; y++) {
  776. downSampleOffsets[id] = x / lastLuminance.width;
  777. downSampleOffsets[id + 1] = y / lastLuminance.height;
  778. id += 2;
  779. }
  780. }
  781. effect.setArray2("dsOffsets", downSampleOffsets);
  782. effect.setFloat("halfDestPixelSize", 0.5 / lastLuminance.width);
  783. if (index === this.luminanceDownSamplePostProcesses.length - 1) {
  784. lastLuminance = this.luminancePostProcess;
  785. } else {
  786. lastLuminance = pp;
  787. }
  788. };
  789. if (index === this.luminanceDownSamplePostProcesses.length - 1) {
  790. pp.onAfterRender = () => {
  791. var pixel = scene.getEngine().readPixels(0, 0, 1, 1);
  792. var bit_shift = new Vector4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);
  793. this._hdrCurrentLuminance = (pixel[0] * bit_shift.x + pixel[1] * bit_shift.y + pixel[2] * bit_shift.z + pixel[3] * bit_shift.w) / 100.0;
  794. };
  795. }
  796. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLuminanceDownSample" + index, () => { return pp; }, true));
  797. });
  798. }
  799. // Create HDR post-process
  800. private _createHdrPostProcess(scene: Scene, ratio: number): void {
  801. const defines = ["#define HDR"];
  802. if (this._hdrAutoExposure) {
  803. defines.push("#define AUTO_EXPOSURE");
  804. }
  805. this.hdrPostProcess = new PostProcess("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines.join("\n"), Constants.TEXTURETYPE_UNSIGNED_INT);
  806. var outputLiminance = 1;
  807. var time = 0;
  808. var lastTime = 0;
  809. this.hdrPostProcess.onApply = (effect: Effect) => {
  810. effect.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource);
  811. time += scene.getEngine().getDeltaTime();
  812. if (outputLiminance < 0) {
  813. outputLiminance = this._hdrCurrentLuminance;
  814. } else {
  815. var dt = (lastTime - time) / 1000.0;
  816. if (this._hdrCurrentLuminance < outputLiminance + this.hdrDecreaseRate * dt) {
  817. outputLiminance += this.hdrDecreaseRate * dt;
  818. }
  819. else if (this._hdrCurrentLuminance > outputLiminance - this.hdrIncreaseRate * dt) {
  820. outputLiminance -= this.hdrIncreaseRate * dt;
  821. }
  822. else {
  823. outputLiminance = this._hdrCurrentLuminance;
  824. }
  825. }
  826. if (this.hdrAutoExposure) {
  827. this._currentExposure = this._fixedExposure / outputLiminance;
  828. } else {
  829. outputLiminance = Scalar.Clamp(outputLiminance, this.hdrMinimumLuminance, 1e20);
  830. effect.setFloat("averageLuminance", outputLiminance);
  831. }
  832. lastTime = time;
  833. this._currentDepthOfFieldSource = this.hdrFinalPostProcess;
  834. };
  835. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDR", () => { return this.hdrPostProcess; }, true));
  836. }
// Create lens flare post-processes (generation at half res, blur, then compose).
private _createLensFlarePostProcess(scene: Scene, ratio: number): void {
    this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", Constants.TEXTURETYPE_UNSIGNED_INT);
    this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlare", () => { return this.lensFlarePostProcess; }, true));
    // Blur the flare at 1/4 resolution using the shared blur pass factory.
    this._createBlurPostProcesses(scene, ratio / 4, 2, "lensFlareBlurWidth");
    this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", Constants.TEXTURETYPE_UNSIGNED_INT);
    this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlareCompose", () => { return this.lensFlareComposePostProcess; }, true));
    // Reused every frame to avoid per-frame allocations.
    var resolution = new Vector2(0, 0);
    // Lens flare generation: sources from the bloom blur when bloom is on,
    // otherwise from the original render.
    this.lensFlarePostProcess.onApply = (effect: Effect) => {
        effect.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess);
        effect.setTexture("lensColorSampler", this.lensColorTexture);
        effect.setFloat("strength", this.lensFlareStrength);
        effect.setFloat("ghostDispersal", this.lensFlareGhostDispersal);
        effect.setFloat("haloWidth", this.lensFlareHaloWidth);
        // Shift
        resolution.x = (<PostProcess>this.lensFlarePostProcess).width;
        resolution.y = (<PostProcess>this.lensFlarePostProcess).height;
        effect.setVector2("resolution", resolution);
        effect.setFloat("distortionStrength", this.lensFlareDistortionStrength);
    };
    // Compose: lensStarMatrix rotates the starburst texture with the camera.
    // scaleBias1 / scaleBias2 re-map UV space around the screen center
    // before/after the rotation ([0,1] <-> [-1,1] style scale-and-bias).
    var scaleBias1 = Matrix.FromValues(
        2.0, 0.0, -1.0, 0.0,
        0.0, 2.0, -1.0, 0.0,
        0.0, 0.0, 1.0, 0.0,
        0.0, 0.0, 0.0, 1.0
    );
    var scaleBias2 = Matrix.FromValues(
        0.5, 0.0, 0.5, 0.0,
        0.0, 0.5, 0.5, 0.0,
        0.0, 0.0, 1.0, 0.0,
        0.0, 0.0, 0.0, 1.0
    );
    this.lensFlareComposePostProcess.onApply = (effect: Effect) => {
        if (!this._scene.activeCamera) {
            return;
        }
        effect.setTextureFromPostProcess("otherSampler", this.lensFlarePostProcess);
        effect.setTexture("lensDirtSampler", this.lensFlareDirtTexture);
        effect.setTexture("lensStarSampler", this.lensStarTexture);
        // Lens star rotation matrix: derive a scalar rotation amount from
        // rows 0 and 2 of the view matrix (presumably the camera's right and
        // forward axes — confirm against Babylon's view-matrix convention).
        var camerax = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(0));
        var cameraz = (<Vector4>this._scene.activeCamera.getViewMatrix().getRow(2));
        var camRot = Vector3.Dot(camerax.toVector3(), new Vector3(1.0, 0.0, 0.0)) + Vector3.Dot(cameraz.toVector3(), new Vector3(0.0, 0.0, 1.0));
        // Amplify so camera motion produces a clearly visible star rotation.
        camRot *= 4.0;
        var starRotation = Matrix.FromValues(
            Math.cos(camRot) * 0.5, -Math.sin(camRot), 0.0, 0.0,
            Math.sin(camRot), Math.cos(camRot) * 0.5, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        );
        var lensStarMatrix = scaleBias2.multiply(starRotation).multiply(scaleBias1);
        effect.setMatrix("lensStarMatrix", lensStarMatrix);
        // Downstream passes read from the flare-composed output.
        this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess;
    };
}
  894. // Create depth-of-field post-process
  895. private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void {
  896. this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Constants.TEXTURETYPE_UNSIGNED_INT);
  897. this.depthOfFieldPostProcess.onApply = (effect: Effect) => {
  898. effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource);
  899. effect.setTexture("depthSampler", this._getDepthTexture());
  900. effect.setFloat("distance", this.depthOfFieldDistance);
  901. };
  902. // Add to pipeline
  903. this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRDepthOfField", () => { return this.depthOfFieldPostProcess; }, true));
  904. }
// Create motion blur post-process (object-based or screen-based variant).
private _createMotionBlurPostProcess(scene: Scene, ratio: number): void {
    if (this._isObjectBasedMotionBlur) {
        // Object-based variant: delegate entirely to the dedicated MotionBlurPostProcess.
        const mb = new MotionBlurPostProcess("HDRMotionBlur", scene, ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT);
        mb.motionStrength = this.motionStrength;
        mb.motionBlurSamples = this.motionBlurSamples;
        this.motionBlurPostProcess = mb;
    } else {
        // Screen-based variant: reconstruct per-pixel motion from the depth
        // buffer and the previous frame's view-projection matrix.
        // MAX_MOTION_SAMPLES is baked into the shader defines.
        this.motionBlurPostProcess = new PostProcess("HDRMotionBlur", "standard",
            ["inverseViewProjection", "prevViewProjection", "screenSize", "motionScale", "motionStrength"],
            ["depthSampler"],
            ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + this.motionBlurSamples.toFixed(1), Constants.TEXTURETYPE_UNSIGNED_INT);
        // Closure state reused every frame to avoid per-frame allocations;
        // prevViewProjection carries the matrix from the previous frame.
        var motionScale: number = 0;
        var prevViewProjection = Matrix.Identity();
        var invViewProjection = Matrix.Identity();
        var viewProjection = Matrix.Identity();
        var screenSize = Vector2.Zero();
        this.motionBlurPostProcess.onApply = (effect: Effect) => {
            // Compose the current view-projection and its inverse for the shader.
            viewProjection = scene.getProjectionMatrix().multiply(scene.getViewMatrix());
            viewProjection.invertToRef(invViewProjection);
            effect.setMatrix("inverseViewProjection", invViewProjection);
            effect.setMatrix("prevViewProjection", prevViewProjection);
            // Keep this frame's matrix around for next frame — must happen
            // after it has been uploaded as "prevViewProjection".
            prevViewProjection = viewProjection;
            screenSize.x = (<PostProcess>this.motionBlurPostProcess).width;
            screenSize.y = (<PostProcess>this.motionBlurPostProcess).height;
            effect.setVector2("screenSize", screenSize);
            // Scale the blur with the current frame rate, normalized to 60 FPS.
            motionScale = scene.getEngine().getFps() / 60.0;
            effect.setFloat("motionScale", motionScale);
            effect.setFloat("motionStrength", this.motionStrength);
            effect.setTexture("depthSampler", this._getDepthTexture());
        };
    }
    this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRMotionBlur", () => { return this.motionBlurPostProcess; }, true));
}
  939. private _getDepthTexture(): Texture {
  940. if (this._scene.getEngine().getCaps().drawBuffersExtension) {
  941. let renderer = <GeometryBufferRenderer>this._scene.enableGeometryBufferRenderer();
  942. return renderer.getGBuffer().textures[0];
  943. }
  944. return this._scene.enableDepthRenderer().getDepthMap();
  945. }
  946. private _disposePostProcesses(): void {
  947. for (var i = 0; i < this._cameras.length; i++) {
  948. var camera = this._cameras[i];
  949. if (this.originalPostProcess) { this.originalPostProcess.dispose(camera); }
  950. if (this.screenSpaceReflectionPostProcess) { this.screenSpaceReflectionPostProcess.dispose(camera); }
  951. if (this.downSampleX4PostProcess) { this.downSampleX4PostProcess.dispose(camera); }
  952. if (this.brightPassPostProcess) { this.brightPassPostProcess.dispose(camera); }
  953. if (this.textureAdderPostProcess) { this.textureAdderPostProcess.dispose(camera); }
  954. if (this.volumetricLightPostProcess) { this.volumetricLightPostProcess.dispose(camera); }
  955. if (this.volumetricLightSmoothXPostProcess) { this.volumetricLightSmoothXPostProcess.dispose(camera); }
  956. if (this.volumetricLightSmoothYPostProcess) { this.volumetricLightSmoothYPostProcess.dispose(camera); }
  957. if (this.volumetricLightMergePostProces) { this.volumetricLightMergePostProces.dispose(camera); }
  958. if (this.volumetricLightFinalPostProcess) { this.volumetricLightFinalPostProcess.dispose(camera); }
  959. if (this.lensFlarePostProcess) { this.lensFlarePostProcess.dispose(camera); }
  960. if (this.lensFlareComposePostProcess) { this.lensFlareComposePostProcess.dispose(camera); }
  961. for (var j = 0; j < this.luminanceDownSamplePostProcesses.length; j++) {
  962. this.luminanceDownSamplePostProcesses[j].dispose(camera);
  963. }
  964. if (this.luminancePostProcess) { this.luminancePostProcess.dispose(camera); }
  965. if (this.hdrPostProcess) { this.hdrPostProcess.dispose(camera); }
  966. if (this.hdrFinalPostProcess) { this.hdrFinalPostProcess.dispose(camera); }
  967. if (this.depthOfFieldPostProcess) { this.depthOfFieldPostProcess.dispose(camera); }
  968. if (this.motionBlurPostProcess) { this.motionBlurPostProcess.dispose(camera); }
  969. if (this.fxaaPostProcess) { this.fxaaPostProcess.dispose(camera); }
  970. for (var j = 0; j < this.blurHPostProcesses.length; j++) {
  971. this.blurHPostProcesses[j].dispose(camera);
  972. }
  973. for (var j = 0; j < this.blurVPostProcesses.length; j++) {
  974. this.blurVPostProcesses[j].dispose(camera);
  975. }
  976. }
  977. this.originalPostProcess = null;
  978. this.downSampleX4PostProcess = null;
  979. this.brightPassPostProcess = null;
  980. this.textureAdderPostProcess = null;
  981. this.textureAdderFinalPostProcess = null;
  982. this.volumetricLightPostProcess = null;
  983. this.volumetricLightSmoothXPostProcess = null;
  984. this.volumetricLightSmoothYPostProcess = null;
  985. this.volumetricLightMergePostProces = null;
  986. this.volumetricLightFinalPostProcess = null;
  987. this.lensFlarePostProcess = null;
  988. this.lensFlareComposePostProcess = null;
  989. this.luminancePostProcess = null;
  990. this.hdrPostProcess = null;
  991. this.hdrFinalPostProcess = null;
  992. this.depthOfFieldPostProcess = null;
  993. this.motionBlurPostProcess = null;
  994. this.fxaaPostProcess = null;
  995. this.screenSpaceReflectionPostProcess = null;
  996. this.luminanceDownSamplePostProcesses = [];
  997. this.blurHPostProcesses = [];
  998. this.blurVPostProcesses = [];
  999. }
/**
 * Dispose of the pipeline and stop all post processes
 */
public dispose(): void {
    // Release every post-process first, then detach this pipeline's cameras
    // from the pipeline manager before the base-class cleanup.
    this._disposePostProcesses();
    this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
    super.dispose();
}
  1008. /**
  1009. * Serialize the rendering pipeline (Used when exporting)
  1010. * @returns the serialized object
  1011. */
  1012. public serialize(): any {
  1013. var serializationObject = SerializationHelper.Serialize(this);
  1014. if (this.sourceLight) {
  1015. serializationObject.sourceLightId = this.sourceLight.id;
  1016. }
  1017. if (this.screenSpaceReflectionPostProcess) {
  1018. serializationObject.screenSpaceReflectionPostProcess = SerializationHelper.Serialize(this.screenSpaceReflectionPostProcess);
  1019. }
  1020. serializationObject.customType = "StandardRenderingPipeline";
  1021. return serializationObject;
  1022. }
  1023. /**
  1024. * Parse the serialized pipeline
  1025. * @param source Source pipeline.
  1026. * @param scene The scene to load the pipeline to.
  1027. * @param rootUrl The URL of the serialized pipeline.
  1028. * @returns An instantiated pipeline from the serialized object.
  1029. */
  1030. public static Parse(source: any, scene: Scene, rootUrl: string): StandardRenderingPipeline {
  1031. var p = SerializationHelper.Parse(() => new StandardRenderingPipeline(source._name, scene, source._ratio), source, scene, rootUrl);
  1032. if (source.sourceLightId) {
  1033. p.sourceLight = <SpotLight | DirectionalLight>scene.getLightByID(source.sourceLightId);
  1034. }
  1035. if (source.screenSpaceReflectionPostProcess) {
  1036. SerializationHelper.Parse(() => p.screenSpaceReflectionPostProcess, source.screenSpaceReflectionPostProcess, scene, rootUrl);
  1037. }
  1038. return p;
  1039. }
/**
 * Luminance steps
 * Number of 3x down-sample passes in the luminance chain: the initial
 * luminance texture is 3^LuminanceSteps pixels per side and is reduced
 * down to 1x1 (see _createLuminancePostProcesses).
 */
public static LuminanceSteps: number = 6;
  1044. }
// Expose the class in the global registered-types map under its BABYLON name.
_TypeStore.RegisteredTypes["BABYLON.StandardRenderingPipeline"] = StandardRenderingPipeline;