// babylon.defaultRenderingPipeline.ts
  1. module BABYLON {
  2. /**
  3. * The default rendering pipeline can be added to a scene to apply common post processing effects such as anti-aliasing or depth of field.
  4. * See https://doc.babylonjs.com/how_to/using_default_rendering_pipeline
  5. */
  6. export class DefaultRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
  7. private _scene: Scene;
  8. private _originalCameras:Array<Camera> = [];
  9. /**
  10. * ID of the sharpen post process,
  11. */
  12. readonly SharpenPostProcessId: string = "SharpenPostProcessEffect";
  13. /**
  14. * ID of the image processing post process;
  15. */
  16. readonly ImageProcessingPostProcessId: string = "ImageProcessingPostProcessEffect";
  17. /**
  18. * ID of the Fast Approximate Anti-Aliasing post process;
  19. */
  20. readonly FxaaPostProcessId: string = "FxaaPostProcessEffect";
  21. /**
  22. * ID of the chromatic aberration post process,
  23. */
  24. readonly ChromaticAberrationPostProcessId: string = "ChromaticAberrationPostProcessEffect";
  25. // Post-processes
  26. /**
  27. * Sharpen post process which will apply a sharpen convolution to enhance edges
  28. */
  29. public sharpen: SharpenPostProcess;
  30. private _sharpenEffect: PostProcessRenderEffect;
  31. private bloom: BloomEffect;
  32. /**
  33. * Depth of field effect, applies a blur based on how far away objects are from the focus distance.
  34. */
  35. public depthOfField: DepthOfFieldEffect;
  36. /**
  37. * The Fast Approximate Anti-Aliasing post process which attemps to remove aliasing from an image.
  38. */
  39. public fxaa: FxaaPostProcess;
  40. /**
  41. * Image post processing pass used to perform operations such as tone mapping or color grading.
  42. */
  43. public imageProcessing: ImageProcessingPostProcess;
  44. /**
  45. * Chromatic aberration post process which will shift rgb colors in the image
  46. */
  47. public chromaticAberration: ChromaticAberrationPostProcess;
  48. private _chromaticAberrationEffect: PostProcessRenderEffect;
  49. /**
  50. * Animations which can be used to tweak settings over a period of time
  51. */
  52. public animations: Animation[] = [];
  53. // Values
  54. private _sharpenEnabled:boolean = false;
  55. private _bloomEnabled: boolean = false;
  56. private _depthOfFieldEnabled: boolean = false;
  57. private _depthOfFieldBlurLevel = DepthOfFieldEffectBlurLevel.Low;
  58. private _fxaaEnabled: boolean = false;
  59. private _msaaEnabled: boolean = false;
  60. private _imageProcessingEnabled: boolean = true;
  61. private _defaultPipelineTextureType: number;
  62. private _bloomScale: number = 0.6;
  63. private _chromaticAberrationEnabled:boolean = false;
  64. private _buildAllowed = true;
  65. /**
  66. * Enable or disable the sharpen process from the pipeline
  67. */
  68. public set sharpenEnabled(enabled: boolean) {
  69. if (this._sharpenEnabled === enabled) {
  70. return;
  71. }
  72. this._sharpenEnabled = enabled;
  73. this._buildPipeline();
  74. }
  75. @serialize()
  76. public get sharpenEnabled(): boolean {
  77. return this._sharpenEnabled;
  78. }
  79. /**
  80. * Specifies the size of the bloom blur kernel, relative to the final output size
  81. */
  82. @serialize()
  83. public bloomKernel: number = 64;
  84. /**
  85. * Specifies the weight of the bloom in the final rendering
  86. */
  87. @serialize()
  88. private _bloomWeight: number = 0.15;
  89. @serialize()
  90. private _hdr: boolean;
  91. /**
  92. * The strength of the bloom.
  93. */
  94. public set bloomWeight(value: number) {
  95. if (this._bloomWeight === value) {
  96. return;
  97. }
  98. if(this.bloom._merge._mergeOptions.bloom){
  99. this.bloom._merge._mergeOptions.bloom.weight = value;
  100. }
  101. this._bloomWeight = value;
  102. }
  103. @serialize()
  104. public get bloomWeight(): number {
  105. return this._bloomWeight;
  106. }
  107. /**
  108. * The scale of the bloom, lower value will provide better performance.
  109. */
  110. public set bloomScale(value: number) {
  111. if (this._bloomScale === value) {
  112. return;
  113. }
  114. this._bloomScale = value;
  115. this._buildPipeline();
  116. }
  117. @serialize()
  118. public get bloomScale(): number {
  119. return this._bloomScale;
  120. }
  121. /**
  122. * Enable or disable the bloom from the pipeline
  123. */
  124. public set bloomEnabled(enabled: boolean) {
  125. if (this._bloomEnabled === enabled) {
  126. return;
  127. }
  128. this._bloomEnabled = enabled;
  129. this._buildPipeline();
  130. }
  131. @serialize()
  132. public get bloomEnabled(): boolean {
  133. return this._bloomEnabled;
  134. }
  135. /**
  136. * If the depth of field is enabled.
  137. */
  138. @serialize()
  139. public get depthOfFieldEnabled(): boolean {
  140. return this._depthOfFieldEnabled;
  141. }
  142. public set depthOfFieldEnabled(enabled: boolean) {
  143. if (this._depthOfFieldEnabled === enabled) {
  144. return;
  145. }
  146. this._depthOfFieldEnabled = enabled;
  147. this._buildPipeline();
  148. }
  149. /**
  150. * Blur level of the depth of field effect. (Higher blur will effect performance)
  151. */
  152. @serialize()
  153. public get depthOfFieldBlurLevel(): DepthOfFieldEffectBlurLevel {
  154. return this._depthOfFieldBlurLevel;
  155. }
  156. public set depthOfFieldBlurLevel(value: DepthOfFieldEffectBlurLevel) {
  157. if (this._depthOfFieldBlurLevel === value) {
  158. return;
  159. }
  160. this._depthOfFieldBlurLevel = value;
  161. // recreate dof and dispose old as this setting is not dynamic
  162. var oldDof = this.depthOfField;
  163. this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType);
  164. this.depthOfField.focalLength = oldDof.focalLength;
  165. this.depthOfField.focusDistance = oldDof.focusDistance;
  166. this.depthOfField.fStop = oldDof.fStop;
  167. this.depthOfField.lensSize = oldDof.lensSize;
  168. for (var i = 0; i < this._cameras.length; i++) {
  169. oldDof.disposeEffects(this._cameras[i]);
  170. }
  171. this._buildPipeline();
  172. }
  173. /**
  174. * If the anti aliasing is enabled.
  175. */
  176. public set fxaaEnabled(enabled: boolean) {
  177. if (this._fxaaEnabled === enabled) {
  178. return;
  179. }
  180. this._fxaaEnabled = enabled;
  181. this._buildPipeline();
  182. }
  183. @serialize()
  184. public get fxaaEnabled(): boolean {
  185. return this._fxaaEnabled;
  186. }
  187. /**
  188. * If the multisample anti-aliasing is enabled.
  189. */
  190. public set msaaEnabled(enabled: boolean) {
  191. if (this._msaaEnabled === enabled) {
  192. return;
  193. }
  194. this._msaaEnabled = enabled;
  195. this._buildPipeline();
  196. }
  197. @serialize()
  198. public get msaaEnabled(): boolean {
  199. return this._msaaEnabled;
  200. }
  201. /**
  202. * If image processing is enabled.
  203. */
  204. public set imageProcessingEnabled(enabled: boolean) {
  205. if (this._imageProcessingEnabled === enabled) {
  206. return;
  207. }
  208. this._imageProcessingEnabled = enabled;
  209. this._buildPipeline();
  210. }
  211. @serialize()
  212. public get imageProcessingEnabled(): boolean {
  213. return this._imageProcessingEnabled;
  214. }
  215. /**
  216. * Enable or disable the chromaticAberration process from the pipeline
  217. */
  218. public set chromaticAberrationEnabled(enabled: boolean) {
  219. if (this._chromaticAberrationEnabled === enabled) {
  220. return;
  221. }
  222. this._chromaticAberrationEnabled = enabled;
  223. this._buildPipeline();
  224. }
  225. @serialize()
  226. public get chromaticAberrationEnabled(): boolean {
  227. return this._chromaticAberrationEnabled;
  228. }
  229. /**
  230. * @constructor
  231. * @param {string} name - The rendering pipeline name
  232. * @param {BABYLON.Scene} scene - The scene linked to this pipeline
  233. * @param {any} ratio - The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width 0.5 and a height = canvas.height 0.5)
  234. * @param {BABYLON.Camera[]} cameras - The array of cameras that the rendering pipeline will be attached to
  235. * @param {boolean} automaticBuild - if false, you will have to manually call prepare() to update the pipeline
  236. */
  237. constructor(name: string, hdr: boolean, scene: Scene, cameras?: Camera[], automaticBuild = true) {
  238. super(scene.getEngine(), name);
  239. this._cameras = cameras ||  [];
  240. this._originalCameras = this._cameras.slice();
  241. this._buildAllowed = automaticBuild;
  242. // Initialize
  243. this._scene = scene;
  244. var caps = this._scene.getEngine().getCaps();
  245. this._hdr = hdr && (caps.textureHalfFloatRender || caps.textureFloatRender);
  246. // Misc
  247. if (this._hdr) {
  248. if (caps.textureHalfFloatRender) {
  249. this._defaultPipelineTextureType = Engine.TEXTURETYPE_HALF_FLOAT;
  250. }
  251. else if (caps.textureFloatRender) {
  252. this._defaultPipelineTextureType = Engine.TEXTURETYPE_FLOAT;
  253. }
  254. } else {
  255. this._defaultPipelineTextureType = Engine.TEXTURETYPE_UNSIGNED_INT;
  256. }
  257. // Attach
  258. scene.postProcessRenderPipelineManager.addPipeline(this);
  259. var engine = this._scene.getEngine();
  260. // Create post processes before hand so they can be modified before enabled.
  261. // Block compilation flag is set to true to avoid compilation prior to use, these will be updated on first use in build pipeline.
  262. this.sharpen = new SharpenPostProcess("sharpen", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
  263. this._sharpenEffect = new PostProcessRenderEffect(engine, this.SharpenPostProcessId, () => { return this.sharpen; }, true);
  264. this.depthOfField = new DepthOfFieldEffect(this._scene, null, this._depthOfFieldBlurLevel, this._defaultPipelineTextureType, true);
  265. this.chromaticAberration = new ChromaticAberrationPostProcess("ChromaticAberration", engine.getRenderWidth(), engine.getRenderHeight(), 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType, true);
  266. this._chromaticAberrationEffect = new PostProcessRenderEffect(engine, this.ChromaticAberrationPostProcessId, () => { return this.chromaticAberration; }, true);
  267. this._buildPipeline();
  268. }
  269. /**
  270. * Force the compilation of the entire pipeline.
  271. */
  272. public prepare(): void {
  273. let previousState = this._buildAllowed;
  274. this._buildAllowed = true;
  275. this._buildPipeline();
  276. this._buildAllowed = previousState;
  277. }
  278. private _prevPostProcess:Nullable<PostProcess> = null;
  279. private _prevPrevPostProcess:Nullable<PostProcess> = null;
  280. private _setAutoClearAndTextureSharing(postProcess:PostProcess, skipTextureSharing = false){
  281. if(this._prevPostProcess && this._prevPostProcess.autoClear){
  282. postProcess.autoClear = false;
  283. }else{
  284. postProcess.autoClear = true;
  285. }
  286. if(!skipTextureSharing){
  287. if(this._prevPrevPostProcess){
  288. postProcess.shareOutputWith(this._prevPrevPostProcess);
  289. }else{
  290. postProcess.useOwnOutput();
  291. }
  292. if(this._prevPostProcess){
  293. this._prevPrevPostProcess = this._prevPostProcess;
  294. }
  295. this._prevPostProcess = postProcess;
  296. }
  297. }
  298. private _buildPipeline() {
  299. if (!this._buildAllowed) {
  300. return;
  301. }
  302. var engine = this._scene.getEngine();
  303. this._disposePostProcesses();
  304. if (this._cameras !== null) {
  305. this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
  306. // get back cameras to be used to reattach pipeline
  307. this._cameras = this._originalCameras.slice();
  308. }
  309. this._reset();
  310. this._prevPostProcess = null;
  311. this._prevPrevPostProcess = null;
  312. if (this.sharpenEnabled) {
  313. if(!this.sharpen.isReady()){
  314. this.sharpen.updateEffect();
  315. }
  316. this.addEffect(this._sharpenEffect);
  317. this._setAutoClearAndTextureSharing(this.sharpen);
  318. }
  319. if (this.depthOfFieldEnabled) {
  320. var depthTexture = this._scene.enableDepthRenderer(this._cameras[0]).getDepthMap();
  321. this.depthOfField.depthTexture = depthTexture;
  322. if(!this.depthOfField._isReady()){
  323. this.depthOfField._updateEffects();
  324. }
  325. this.addEffect(this.depthOfField);
  326. this._setAutoClearAndTextureSharing(this.depthOfField._depthOfFieldMerge);
  327. }
  328. if (this.bloomEnabled) {
  329. this.bloom = new BloomEffect(this._scene, this.bloomScale, this.bloomKernel);
  330. this.addEffect(this.bloom);
  331. }
  332. if (this.fxaaEnabled) {
  333. this.fxaa = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, engine, false, this._defaultPipelineTextureType);
  334. this.addEffect(new PostProcessRenderEffect(engine, this.FxaaPostProcessId, () => { return this.fxaa; }, true));
  335. this._setAutoClearAndTextureSharing(this.fxaa);
  336. }
  337. if (this.chromaticAberrationEnabled) {
  338. if(!this.chromaticAberration.isReady()){
  339. this.chromaticAberration.updateEffect();
  340. }
  341. this.addEffect(this._chromaticAberrationEffect);
  342. this._setAutoClearAndTextureSharing(this.chromaticAberration);
  343. }
  344. if (this._cameras !== null) {
  345. this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline(this._name, this._cameras);
  346. }
  347. if(this.msaaEnabled){
  348. if(!this._enableMSAAOnFirstPostProcess()){
  349. BABYLON.Tools.Warn("MSAA failed to enable, MSAA is only supported in browsers that support webGL >= 2.0");
  350. }
  351. }
  352. }
  353. private _disposePostProcesses(disposeNonRecreated = false): void {
  354. for (var i = 0; i < this._cameras.length; i++) {
  355. var camera = this._cameras[i];
  356. if (this.imageProcessing) {
  357. this.imageProcessing.dispose(camera);
  358. }
  359. if (this.fxaa) {
  360. this.fxaa.dispose(camera);
  361. }
  362. // These are created in the constructor and should not be disposed on every pipeline change
  363. if(disposeNonRecreated){
  364. if (this.sharpen) {
  365. this.sharpen.dispose(camera);
  366. }
  367. if(this.depthOfField){
  368. this.depthOfField.disposeEffects(camera);
  369. }
  370. if(this.chromaticAberration){
  371. this.chromaticAberration.dispose(camera);
  372. }
  373. }
  374. }
  375. (<any>this.imageProcessing) = null;
  376. (<any>this.fxaa) = null;
  377. if(disposeNonRecreated){
  378. (<any>this.sharpen) = null;
  379. (<any>this.depthOfField) = null;
  380. (<any>this.chromaticAberration) = null;
  381. }
  382. }
  383. /**
  384. * Dispose of the pipeline and stop all post processes
  385. */
  386. public dispose(): void {
  387. this._disposePostProcesses(true);
  388. this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline(this._name, this._cameras);
  389. super.dispose();
  390. }
  391. /**
  392. * Serialize the rendering pipeline (Used when exporting)
  393. * @returns the serialized object
  394. */
  395. public serialize(): any {
  396. var serializationObject = SerializationHelper.Serialize(this);
  397. serializationObject.customType = "DefaultRenderingPipeline";
  398. return serializationObject;
  399. }
  400. /**
  401. * Parse the serialized pipeline
  402. * @param source Source pipeline.
  403. * @param scene The scene to load the pipeline to.
  404. * @param rootUrl The URL of the serialized pipeline.
  405. * @returns An instantiated pipeline from the serialized object.
  406. */
  407. public static Parse(source: any, scene: Scene, rootUrl: string): DefaultRenderingPipeline {
  408. return SerializationHelper.Parse(() => new DefaultRenderingPipeline(source._name, source._name._hdr, scene), source, scene, rootUrl);
  409. }
  410. }
  411. }