// babylon.defaultRenderingPipeline.ts
  1. module BABYLON {
  2. /**
  3. * The default rendering pipeline can be added to a scene to apply common post processing effects such as anti-aliasing or depth of field.
  4. * See https://doc.babylonjs.com/how_to/using_default_rendering_pipeline
  5. */
  6. export class DefaultRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
private _scene: Scene;
// Snapshot of the cameras passed at construction time; the pipeline is
// re-attached to these whenever it is rebuilt (see _buildPipeline).
private _originalCameras:Array<Camera> = [];
/**
 * ID of the sharpen post process.
 */
readonly SharpenPostProcessId: string = "SharpenPostProcessEffect";
/**
 * ID of the image processing post process.
 */
readonly ImageProcessingPostProcessId: string = "ImageProcessingPostProcessEffect";
/**
 * ID of the Fast Approximate Anti-Aliasing post process.
 */
readonly FxaaPostProcessId: string = "FxaaPostProcessEffect";
/**
 * ID of the chromatic aberration post process.
 */
readonly ChromaticAberrationPostProcessId: string = "ChromaticAberrationPostProcessEffect";
// Post-processes
/**
 * Sharpen post process which will apply a sharpen convolution to enhance edges
 */
public sharpen: SharpenPostProcess;
private _sharpenEffect: PostProcessRenderEffect;
// Bloom effect; recreated whenever bloomScale changes (see _rebuildBloom).
private bloom: BloomEffect;
// Merge pass that combines the depth of field / bloom outputs with the original image.
private _defaultPipelineMerge:DefaultPipelineMergeMergePostProcess;
private _defaultPipelineMergeEffect: PostProcessRenderEffect;
/**
 * Depth of field effect, applies a blur based on how far away objects are from the focus distance.
 */
public depthOfField: DepthOfFieldEffect;
/**
 * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
 */
public fxaa: FxaaPostProcess;
/**
 * Image post processing pass used to perform operations such as tone mapping or color grading.
 */
public imageProcessing: ImageProcessingPostProcess;
/**
 * Chromatic aberration post process which will shift rgb colors in the image
 */
public chromaticAberration: ChromaticAberrationPostProcess;
private _chromaticAberrationEffect: PostProcessRenderEffect;
/**
 * Animations which can be used to tweak settings over a period of time
 */
public animations: Animation[] = [];
// Values
private _sharpenEnabled:boolean = false;
private _bloomEnabled: boolean = false;
private _depthOfFieldEnabled: boolean = false;
private _depthOfFieldBlurLevel = DepthOfFieldEffectBlurLevel.Low;
private _fxaaEnabled: boolean = false;
private _msaaEnabled: boolean = false;
private _imageProcessingEnabled: boolean = true;
// Texture type used by the pipeline's post processes (half float / float / unsigned int),
// chosen in the constructor from the hdr flag and engine capabilities.
private _defaultPipelineTextureType: number;
private _bloomScale: number = 0.6;
private _chromaticAberrationEnabled:boolean = false;
// When false, _buildPipeline() is a no-op until prepare() is called.
private _buildAllowed = true;
  67. /**
  68. * Enable or disable the sharpen process from the pipeline
  69. */
  70. public set sharpenEnabled(enabled: boolean) {
  71. if (this._sharpenEnabled === enabled) {
  72. return;
  73. }
  74. this._sharpenEnabled = enabled;
  75. this._buildPipeline();
  76. }
  77. @serialize()
  78. public get sharpenEnabled(): boolean {
  79. return this._sharpenEnabled;
  80. }
  81. private _bloomKernel: number = 64;
  82. /**
  83. * Specifies the size of the bloom blur kernel, relative to the final output size
  84. */
  85. @serialize()
  86. public get bloomKernel(): number{
  87. return this._bloomKernel;
  88. }
  89. public set bloomKernel(value: number){
  90. this._bloomKernel = value;
  91. this.bloom.kernel = value;
  92. }
/**
 * Specifies the weight of the bloom in the final rendering
 */
@serialize()
private _bloomWeight: number = 0.15;
/**
 * Specifies the luma threshold for the area that will be blurred by the bloom
 */
@serialize()
private _bloomThreshold: number = 0.9;
// True when the pipeline renders to half/full float textures; set in the
// constructor from the hdr argument and the engine's capabilities.
@serialize()
private _hdr: boolean;
  105. /**
  106. * The strength of the bloom.
  107. */
  108. public set bloomWeight(value: number) {
  109. if (this._bloomWeight === value) {
  110. return;
  111. }
  112. if(this._defaultPipelineMerge._mergeOptions && this._defaultPipelineMerge._mergeOptions.bloom){
  113. this._defaultPipelineMerge._mergeOptions.bloom.weight = value;
  114. }
  115. this._bloomWeight = value;
  116. }
  117. @serialize()
  118. public get bloomWeight(): number {
  119. return this._bloomWeight;
  120. }
  121. /**
  122. * The strength of the bloom.
  123. */
  124. public set bloomThreshold(value: number) {
  125. if (this._bloomThreshold === value) {
  126. return;
  127. }
  128. if((this.bloom.threshold > 0 && value == 0) || (this.bloom.threshold == 0 && value > 0)){
  129. this.bloom.threshold = value;
  130. this._bloomThreshold = value;
  131. this._buildPipeline();
  132. }else{
  133. this.bloom.threshold = value;
  134. this._bloomThreshold = value;
  135. }
  136. }
  137. @serialize()
  138. public get bloomThreshold(): number {
  139. return this._bloomThreshold;
  140. }
  141. /**
  142. * The scale of the bloom, lower value will provide better performance.
  143. */
  144. public set bloomScale(value: number) {
  145. if (this._bloomScale === value) {
  146. return;
  147. }
  148. this._bloomScale = value;
  149. // recreate bloom and dispose old as this setting is not dynamic
  150. this._rebuildBloom();
  151. this._buildPipeline();
  152. }
  153. @serialize()
  154. public get bloomScale(): number {
  155. return this._bloomScale;
  156. }
  157. /**
  158. * Enable or disable the bloom from the pipeline
  159. */
  160. public set bloomEnabled(enabled: boolean) {
  161. if (this._bloomEnabled === enabled) {
  162. return;
  163. }
  164. this._bloomEnabled = enabled;
  165. this._buildPipeline();
  166. }
  167. @serialize()
  168. public get bloomEnabled(): boolean {
  169. return this._bloomEnabled;
  170. }
  171. private _rebuildBloom(){
  172. // recreate bloom and dispose old as this setting is not dynamic
  173. var oldBloom = this.bloom;
  174. this.bloom = new BloomEffect(this._scene, this.bloomScale, this.bloomKernel, this._defaultPipelineTextureType, false);
  175. this.bloom.threshold = oldBloom.threshold;
  176. for (var i = 0; i < this._cameras.length; i++) {
  177. oldBloom.disposeEffects(this._cameras[i]);
  178. }
  179. }
  180. /**
  181. * If the depth of field is enabled.
  182. */
  183. @serialize()
  184. public get depthOfFieldEnabled(): boolean {
  185. return this._depthOfFieldEnabled;
  186. }
  187. public set depthOfFieldEnabled(enabled: boolean) {
  188. if (this._depthOfFieldEnabled === enabled) {
  189. return;
  190. }
  191. this._depthOfFieldEnabled = enabled;
  192. this._buildPipeline();
  193. }
  194. /**
  195. * Blur level of the depth of field effect. (Higher blur will effect performance)
  196. */
  197. @serialize()
  198. public get depthOfFieldBlurLevel(): DepthOfFieldEffectBlurLevel {
  199. return this._depthOfFieldBlurLevel;
  200. }
  201. public set depthOfFieldBlurLevel(value: DepthOfFieldEffectBlurLevel) {
  202. if (this._depthOfFieldBlurLevel === value) {
  203. return;
  204. }
  205. this._depthOfFieldBlurLevel = value;
  206. // recreate dof and dispose old as this setting is not dynamic
  207. var oldDof = this.depthOfField;
  208. this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, false);
  209. this.depthOfField.focalLength = oldDof.focalLength;
  210. this.depthOfField.focusDistance = oldDof.focusDistance;
  211. this.depthOfField.fStop = oldDof.fStop;
  212. this.depthOfField.lensSize = oldDof.lensSize;
  213. for (var i = 0; i < this._cameras.length; i++) {
  214. oldDof.disposeEffects(this._cameras[i]);
  215. }
  216. this._buildPipeline();
  217. }
  218. /**
  219. * If the anti aliasing is enabled.
  220. */
  221. public set fxaaEnabled(enabled: boolean) {
  222. if (this._fxaaEnabled === enabled) {
  223. return;
  224. }
  225. this._fxaaEnabled = enabled;
  226. this._buildPipeline();
  227. }
  228. @serialize()
  229. public get fxaaEnabled(): boolean {
  230. return this._fxaaEnabled;
  231. }
  232. /**
  233. * If the multisample anti-aliasing is enabled.
  234. */
  235. public set msaaEnabled(enabled: boolean) {
  236. if (this._msaaEnabled === enabled) {
  237. return;
  238. }
  239. this._msaaEnabled = enabled;
  240. this._buildPipeline();
  241. }
  242. @serialize()
  243. public get msaaEnabled(): boolean {
  244. return this._msaaEnabled;
  245. }
  246. /**
  247. * If image processing is enabled.
  248. */
  249. public set imageProcessingEnabled(enabled: boolean) {
  250. if (this._imageProcessingEnabled === enabled) {
  251. return;
  252. }
  253. this._imageProcessingEnabled = enabled;
  254. this._buildPipeline();
  255. }
  256. @serialize()
  257. public get imageProcessingEnabled(): boolean {
  258. return this._imageProcessingEnabled;
  259. }
  260. /**
  261. * Enable or disable the chromaticAberration process from the pipeline
  262. */
  263. public set chromaticAberrationEnabled(enabled: boolean) {
  264. if (this._chromaticAberrationEnabled === enabled) {
  265. return;
  266. }
  267. this._chromaticAberrationEnabled = enabled;
  268. this._buildPipeline();
  269. }
  270. @serialize()
  271. public get chromaticAberrationEnabled(): boolean {
  272. return this._chromaticAberrationEnabled;
  273. }
/**
 * Instantiates the DefaultRenderingPipeline and registers it on the scene's
 * post process render pipeline manager.
 * (Note: the previous JSDoc documented a non-existent `ratio` parameter and
 * omitted `hdr`; corrected here to match the actual signature.)
 * @param name - The rendering pipeline name
 * @param hdr - If high dynamic range textures should be used; silently falls back
 * to unsigned int textures when the engine supports neither half nor full float render targets
 * @param scene - The scene linked to this pipeline
 * @param cameras - The array of cameras that the rendering pipeline will be attached to (defaults to none)
 * @param automaticBuild - if false, you will have to manually call prepare() to update the pipeline
 */
constructor(name: string, hdr: boolean, scene: Scene, cameras?: Camera[], automaticBuild = true) {
    super(scene.getEngine(), name);
    this._cameras = cameras ||  [];
    // Keep a copy so the pipeline can be re-attached to the same cameras after each rebuild.
    this._originalCameras = this._cameras.slice();
    this._buildAllowed = automaticBuild;
    // Initialize
    this._scene = scene;
    var caps = this._scene.getEngine().getCaps();
    // HDR is only honored when the hardware can render to float textures.
    this._hdr = hdr && (caps.textureHalfFloatRender || caps.textureFloatRender);
    // Misc
    if (this._hdr) {
        if (caps.textureHalfFloatRender) {
            this._defaultPipelineTextureType = Engine.TEXTURETYPE_HALF_FLOAT;
        }
        else if (caps.textureFloatRender) {
            this._defaultPipelineTextureType = Engine.TEXTURETYPE_FLOAT;
        }
    } else {
        this._defaultPipelineTextureType = Engine.TEXTURETYPE_UNSIGNED_INT;
    }
    // Attach
    scene.postProcessRenderPipelineManager.addPipeline(this);
    var engine = this._scene.getEngine();
    // Create post processes before hand so they can be modified before enabled.
    // Block compilation flag is set to true to avoid compilation prior to use, these will be updated on first use in build pipeline.
    this.sharpen = new SharpenPostProcess("sharpen", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
    this._sharpenEffect = new PostProcessRenderEffect(engine, this.SharpenPostProcessId, () => { return this.sharpen; }, true);
    this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, false, true);
    this.bloom = new BloomEffect(this._scene, this.bloomScale, this.bloomKernel, this._defaultPipelineTextureType, false, true);
    this._defaultPipelineMerge = new DefaultPipelineMergeMergePostProcess("defaultPipelineMerge", {}, 1, null, BABYLON.Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._defaultPipelineTextureType, true);
    this._defaultPipelineMergeEffect = new PostProcessRenderEffect(engine, "defaultPipelineMerge", () => { return this._defaultPipelineMerge; }, true);
    this.chromaticAberration = new ChromaticAberrationPostProcess("ChromaticAberration", engine.getRenderWidth(), engine.getRenderHeight(), 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
    this._chromaticAberrationEffect = new PostProcessRenderEffect(engine, this.ChromaticAberrationPostProcessId, () => { return this.chromaticAberration; }, true);
    this._buildPipeline();
}
  317. /**
  318. * Force the compilation of the entire pipeline.
  319. */
  320. public prepare(): void {
  321. let previousState = this._buildAllowed;
  322. this._buildAllowed = true;
  323. this._buildPipeline();
  324. this._buildAllowed = previousState;
  325. }
// True once a post process in the chain has been flagged to clear;
// only the first one clears and the scene clear is then disabled.
private _hasCleared = false;
// Last two post processes appended to the chain; used to alternate
// ("ping-pong") output textures between every other post process.
private _prevPostProcess:Nullable<PostProcess> = null;
private _prevPrevPostProcess:Nullable<PostProcess> = null;
// First post process of the chain; its output is reused elsewhere
// (e.g. as the bloom downscale input in _buildPipeline).
private _firstPostProcess:Nullable<PostProcess> = null;
/**
 * Configures clearing and output-texture sharing for a post process being
 * appended to the chain: the first post process clears (instead of the scene),
 * and later ones reuse the output texture of the post process two steps back.
 * @param postProcess The post process being appended to the chain.
 * @param skipTextureSharing When true the post process keeps its own output
 * texture (used when a later effect still needs to sample it).
 */
private _setAutoClearAndTextureSharing(postProcess:PostProcess, skipTextureSharing = false){
    if(this._hasCleared){
        postProcess.autoClear = false;
    }else{
        // First post process in the chain performs the clear; disable the scene's.
        postProcess.autoClear = true;
        this._scene.autoClear = false;
        this._hasCleared = true;
    }
    if(!this._firstPostProcess){
        // The first post process must keep its own output texture.
        this._firstPostProcess = postProcess;
        skipTextureSharing = true;
    }
    if(!skipTextureSharing){
        if(this._prevPrevPostProcess){
            // Reuse the texture of the post process two steps back (ping-pong).
            postProcess.shareOutputWith(this._prevPrevPostProcess);
        }else{
            postProcess.useOwnOutput();
        }
        if(this._prevPostProcess){
            this._prevPrevPostProcess = this._prevPostProcess;
        }
        this._prevPostProcess = postProcess;
    }
}
/**
 * (Re)builds the chain of post processes according to the enabled effects and
 * re-attaches the pipeline to its cameras. Called whenever a setter toggles or
 * reconfigures an effect. The statement order matters: effects are appended in
 * fixed order (fxaa, sharpen, depth of field, bloom, merge, image processing,
 * chromatic aberration) and the shared clear/texture state is reset first.
 */
private _buildPipeline() {
    if (!this._buildAllowed) {
        return;
    }
    this._scene.autoClear = true;
    var engine = this._scene.getEngine();
    this._disposePostProcesses();
    if (this._cameras !== null) {
        this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
        // get back cameras to be used to reattach pipeline
        this._cameras = this._originalCameras.slice();
    }
    this._reset();
    // Reset the per-build state used by _setAutoClearAndTextureSharing.
    this._prevPostProcess = null;
    this._prevPrevPostProcess = null;
    this._hasCleared = false;
    this._firstPostProcess = null;
    var mergeOptions = new DefaultPipelineMergePostProcessOptions();
    if (this.fxaaEnabled) {
        this.fxaa = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
        this.addEffect(new PostProcessRenderEffect(engine, this.FxaaPostProcessId, () => { return this.fxaa; }, true));
        this._setAutoClearAndTextureSharing(this.fxaa);
    }
    if (this.sharpenEnabled) {
        // Created with blockCompilation = true in the constructor; compile on first use.
        if(!this.sharpen.isReady()){
            this.sharpen.updateEffect();
        }
        this.addEffect(this._sharpenEffect);
        this._setAutoClearAndTextureSharing(this.sharpen);
    }
    if (this.depthOfFieldEnabled) {
        // Depth of field samples the scene depth rendered from the first camera.
        var depthTexture = this._scene.enableDepthRenderer(this._cameras[0]).getDepthMap();
        this.depthOfField.depthTexture = depthTexture;
        if(!this.depthOfField._isReady()){
            this.depthOfField._updateEffects();
        }
        // Hand the circle-of-confusion and blur outputs to the merge pass.
        mergeOptions.depthOfField = {circleOfConfusion: this.depthOfField._effects[0], blurSteps: this.depthOfField._depthOfFieldBlurX};
        if(!mergeOptions.originalFromInput){
            mergeOptions.originalFromInput=this.depthOfField._effects[0];
        }
        this.addEffect(this.depthOfField);
        // Skip texture sharing: the merge pass still samples these outputs.
        this._setAutoClearAndTextureSharing(this.depthOfField._effects[0], true);
    }
    if (this.bloomEnabled) {
        if(!this.bloom._isReady()){
            this.bloom._updateEffects();
        }
        // mix is set when the threshold is zero; the bloomThreshold setter rebuilds
        // the pipeline whenever the threshold crosses zero so this stays in sync.
        mergeOptions.bloom = {blurred: this.bloom._effects[this.bloom._effects.length-1], weight: this.bloomWeight, mix: this._bloomThreshold == 0}
        if(!mergeOptions.originalFromInput){
            mergeOptions.originalFromInput=this.bloom._effects[0];
        }
        this.bloom._downscale._inputPostProcess = this._firstPostProcess;
        this.addEffect(this.bloom);
        this._setAutoClearAndTextureSharing(this.bloom._effects[0], true);
    }
    // Only add the merge pass when depth of field and/or bloom contributed to it.
    if(mergeOptions.originalFromInput){
        this._defaultPipelineMerge._mergeOptions = mergeOptions;
        this._defaultPipelineMerge.updateEffect();
        this.addEffect(this._defaultPipelineMergeEffect);
        this._setAutoClearAndTextureSharing(this._defaultPipelineMerge, true);
    }
    if (this._imageProcessingEnabled) {
        this.imageProcessing = new ImageProcessingPostProcess("imageProcessing", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
        if (this._hdr) {
            this.addEffect(new PostProcessRenderEffect(engine, this.ImageProcessingPostProcessId, () => { return this.imageProcessing; }, true));
            this._setAutoClearAndTextureSharing(this.imageProcessing);
        } else {
            // Without HDR, image processing is not applied as a dedicated post process.
            this._scene.imageProcessingConfiguration.applyByPostProcess = false;
        }
    }
    if (this.chromaticAberrationEnabled) {
        if(!this.chromaticAberration.isReady()){
            this.chromaticAberration.updateEffect();
        }
        this.addEffect(this._chromaticAberrationEffect);
        this._setAutoClearAndTextureSharing(this.chromaticAberration);
    }
    if (this._cameras !== null) {
        this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
    }
    if(this.msaaEnabled){
        if(!this._enableMSAAOnFirstPostProcess()){
            BABYLON.Tools.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
        }
    }
}
  440. private _disposePostProcesses(disposeNonRecreated = false): void {
  441. for (var i = 0; i < this._cameras.length; i++) {
  442. var camera = this._cameras[i];
  443. if (this.imageProcessing) {
  444. this.imageProcessing.dispose(camera);
  445. }
  446. if (this.fxaa) {
  447. this.fxaa.dispose(camera);
  448. }
  449. // These are created in the constructor and should not be disposed on every pipeline change
  450. if(disposeNonRecreated){
  451. if (this.sharpen) {
  452. this.sharpen.dispose(camera);
  453. }
  454. if(this.depthOfField){
  455. this.depthOfField.disposeEffects(camera);
  456. }
  457. if(this.bloom){
  458. this.bloom.disposeEffects(camera);
  459. }
  460. if(this.chromaticAberration){
  461. this.chromaticAberration.dispose(camera);
  462. }
  463. if(this._defaultPipelineMerge){
  464. this._defaultPipelineMerge.dispose(camera);
  465. }
  466. }
  467. }
  468. (<any>this.imageProcessing) = null;
  469. (<any>this.fxaa) = null;
  470. if(disposeNonRecreated){
  471. (<any>this.sharpen) = null;
  472. (<any>this._sharpenEffect) = null;
  473. (<any>this.depthOfField) = null;
  474. (<any>this.bloom) = null;
  475. (<any>this.chromaticAberration) = null;
  476. (<any>this._chromaticAberrationEffect) = null;
  477. }
  478. }
  479. /**
  480. * Dispose of the pipeline and stop all post processes
  481. */
  482. public dispose(): void {
  483. this._disposePostProcesses(true);
  484. this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
  485. this._scene.autoClear = true;
  486. super.dispose();
  487. }
  488. /**
  489. * Serialize the rendering pipeline (Used when exporting)
  490. * @returns the serialized object
  491. */
  492. public serialize(): any {
  493. var serializationObject = SerializationHelper.Serialize(this);
  494. serializationObject.customType = "DefaultRenderingPipeline";
  495. return serializationObject;
  496. }
  497. /**
  498. * Parse the serialized pipeline
  499. * @param source Source pipeline.
  500. * @param scene The scene to load the pipeline to.
  501. * @param rootUrl The URL of the serialized pipeline.
  502. * @returns An instantiated pipeline from the serialized object.
  503. */
  504. public static Parse(source: any, scene: Scene, rootUrl: string): DefaultRenderingPipeline {
  505. return SerializationHelper.Parse(() => new DefaultRenderingPipeline(source._name, source._name._hdr, scene), source, scene, rootUrl);
  506. }
  507. }
  508. }