David Catuhe 6 years ago
parent
commit
15a9d8ae5d
68 changed files with 3250 additions and 2204 deletions
  1. Playground/babylon.d.txt (+158, -4)
  2. Tools/Gulp/tasks/gulpTasks-libraries.js (+29, -8)
  3. Tools/Publisher/tasks/versionNumberManager.js (+2, -2)
  4. dist/preview release/babylon.d.ts (+378, -103)
  5. dist/preview release/babylon.js (+1, -1)
  6. dist/preview release/babylon.max.js (+518, -247)
  7. dist/preview release/babylon.max.js.map (+1, -1)
  8. dist/preview release/babylon.module.d.ts (+595, -713)
  9. dist/preview release/documentation.d.ts (+158, -4)
  10. dist/preview release/glTF2Interface/package.json (+1, -1)
  11. dist/preview release/gui/package.json (+2, -2)
  12. dist/preview release/inspector/package.json (+6, -6)
  13. dist/preview release/loaders/babylon.glTF2FileLoader.js (+3, -8)
  14. dist/preview release/loaders/babylon.glTF2FileLoader.js.map (+1, -1)
  15. dist/preview release/loaders/babylon.glTF2FileLoader.min.js (+1, -1)
  16. dist/preview release/loaders/babylon.glTFFileLoader.js (+3, -8)
  17. dist/preview release/loaders/babylon.glTFFileLoader.js.map (+1, -1)
  18. dist/preview release/loaders/babylon.glTFFileLoader.min.js (+1, -1)
  19. dist/preview release/loaders/babylonjs.loaders.js (+3, -8)
  20. dist/preview release/loaders/babylonjs.loaders.js.map (+1, -1)
  21. dist/preview release/loaders/babylonjs.loaders.min.js (+2, -2)
  22. dist/preview release/loaders/package.json (+3, -3)
  23. dist/preview release/materialsLibrary/package.json (+2, -2)
  24. dist/preview release/nodeEditor/package.json (+2, -2)
  25. dist/preview release/package.json (+1, -1)
  26. dist/preview release/packagesSizeBaseLine.json (+1, -1)
  27. dist/preview release/postProcessesLibrary/package.json (+2, -2)
  28. dist/preview release/proceduralTexturesLibrary/package.json (+2, -2)
  29. dist/preview release/serializers/package.json (+3, -3)
  30. dist/preview release/viewer/babylon.module.d.ts (+595, -713)
  31. dist/preview release/viewer/babylon.viewer.js (+31, -27)
  32. dist/preview release/viewer/babylon.viewer.max.js (+2, -2)
  33. dist/preview release/what's new.md (+7, -1)
  34. loaders/src/glTF/2.0/Extensions/KHR_materials_pbrSpecularGlossiness.ts (+0, -2)
  35. loaders/src/glTF/2.0/Extensions/KHR_materials_unlit.ts (+0, -1)
  36. loaders/src/glTF/2.0/glTFLoader.ts (+4, -5)
  37. package.json (+1, -1)
  38. src/Audio/audioSceneComponent.ts (+1, -1)
  39. src/Cameras/XR/webXRCamera.ts (+16, -14)
  40. src/Cameras/XR/webXREnterExitUI.ts (+13, -16)
  41. src/Cameras/XR/webXRExperienceHelper.ts (+27, -37)
  42. src/Cameras/XR/webXRInput.ts (+114, -44)
  43. src/Cameras/XR/webXRManagedOutputCanvas.ts (+27, -8)
  44. src/Cameras/XR/webXRSessionManager.ts (+89, -89)
  45. src/Engines/engine.ts (+10, -2)
  46. src/Gamepads/xboxGamepad.ts (+10, -10)
  47. src/Helpers/sceneHelpers.ts (+1, -1)
  48. src/LibDeclarations/webxr.d.ts (+106, -30)
  49. src/Materials/Textures/texture.ts (+10, -8)
  50. src/Materials/effectRenderer.ts (+192, -0)
  51. src/Materials/index.ts (+2, -1)
  52. src/Meshes/Builders/cylinderBuilder.ts (+4, -2)
  53. src/Meshes/instancedMesh.ts (+3, -0)
  54. src/Misc/assetsManager.ts (+10, -1)
  55. src/Misc/basis.ts (+5, -3)
  56. src/Misc/webRequest.ts (+2, -2)
  57. src/Physics/Plugins/ammoJSPlugin.ts (+5, -1)
  58. src/Physics/physicsImpostor.ts (+3, -0)
  59. src/Rendering/depthRenderer.ts (+31, -3)
  60. src/Rendering/depthRendererSceneComponent.ts (+5, -4)
  61. src/Shaders/ShadersInclude/packingFunctions.fx (+16, -0)
  62. src/Shaders/ShadersInclude/shadowsFragmentFunctions.fx (+1, -0)
  63. src/Shaders/depth.fragment.fx (+17, -1)
  64. src/Shaders/kernelBlur.fragment.fx (+1, -16)
  65. src/Shaders/shadowMap.fragment.fx (+1, -10)
  66. src/Shaders/standard.fragment.fx (+1, -9)
  67. src/scene.ts (+6, -0)
  68. tests/validation/ReferenceImages/depthRenderer.png (binary)

+ 158 - 4
Playground/babylon.d.txt

@@ -5027,7 +5027,7 @@ declare module BABYLON {
         /**
          * Add callback functions in this array to update all the requests before they get sent to the network
          */
-        static CustomRequestModifiers: ((request: XMLHttpRequest) => void)[];
+        static CustomRequestModifiers: ((request: XMLHttpRequest, url: string) => void)[];
         private _injectCustomRequestHeaders;
         /**
          * Gets or sets a function to be called when loading progress changes
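For illustration, a request modifier written against the new signature might look like the sketch below. It assumes this array is the static `CustomRequestModifiers` on `BABYLON.WebRequest` (the class is not named in this hunk, but src/Misc/webRequest.ts is touched by the commit), and the host name is made up.

```ts
// Hedged sketch: the modifier callback now also receives the request url,
// so behaviour can vary per request. The host below is purely illustrative.
BABYLON.WebRequest.CustomRequestModifiers.push((request: XMLHttpRequest, url: string) => {
    if (url.indexOf("assets.example.com") !== -1) {
        request.withCredentials = true; // send credentials only to this host
    }
});
```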
@@ -18315,7 +18315,7 @@ declare module BABYLON {
          * This represents a texture in babylon. It can be easily loaded from a network, base64 or html input.
          * @see http://doc.babylonjs.com/babylon101/materials#texture
          * @param url define the url of the picture to load as a texture
-         * @param scene define the scene the texture will belong to
+         * @param scene define the scene or engine the texture will belong to
          * @param noMipmap define if the texture will require mip maps or not
          * @param invertY define if the texture needs to be inverted on the y axis during loading
         * @param samplingMode define the sampling mode we want for the texture while fetching from it (Texture.NEAREST_SAMPLINGMODE...)
@@ -18325,7 +18325,7 @@ declare module BABYLON {
          * @param deleteBuffer define if the buffer we are loading the texture from should be deleted after load
          * @param format define the format of the texture we are trying to load (Engine.TEXTUREFORMAT_RGBA...)
          */
-        constructor(url: Nullable<string>, scene: Nullable<Scene>, noMipmap?: boolean, invertY?: boolean, samplingMode?: number, onLoad?: Nullable<() => void>, onError?: Nullable<(message?: string, exception?: any) => void>, buffer?: Nullable<string | ArrayBuffer | HTMLImageElement | Blob>, deleteBuffer?: boolean, format?: number);
+        constructor(url: Nullable<string>, sceneOrEngine: Nullable<Scene | Engine>, noMipmap?: boolean, invertY?: boolean, samplingMode?: number, onLoad?: Nullable<() => void>, onError?: Nullable<(message?: string, exception?: any) => void>, buffer?: Nullable<string | ArrayBuffer | HTMLImageElement | Blob>, deleteBuffer?: boolean, format?: number);
         /**
          * Update the url (and optional buffer) of this texture if url was null during construction.
          * @param url the url of the texture
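The widened constructor means a `Texture` can now be created from an `Engine` alone, before any `Scene` exists. A minimal sketch (the canvas setup and texture url are illustrative):

```ts
// Hedged sketch: passing an Engine rather than a Scene as the second argument.
const canvas = document.createElement("canvas");
const engine = new BABYLON.Engine(canvas, true);
const texture = new BABYLON.Texture("textures/albedo.png", engine); // url is illustrative
```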
@@ -27565,6 +27565,11 @@ declare module BABYLON {
          */
         readonly webGLVersion: number;
         /**
+         * Gets a string identifying the name of the class
+         * @returns "Engine" string
+         */
+        getClassName(): string;
+        /**
          * Returns true if the stencil buffer has been enabled through the creation option of the context.
          */
         readonly isStencilEnable: boolean;
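The new `getClassName` makes it straightforward to tell an `Engine` apart from a `Scene` at runtime when a value is typed `Scene | Engine`, as in the `Texture` constructor above. A small sketch (the helper name is ours; `Scene` already exposes `getClassName`):

```ts
// Hedged sketch: resolve a Scene from a Scene | Engine union without instanceof checks.
function resolveScene(sceneOrEngine: BABYLON.Scene | BABYLON.Engine): BABYLON.Nullable<BABYLON.Scene> {
    if (sceneOrEngine.getClassName() === "Engine") {
        return (sceneOrEngine as BABYLON.Engine).scenes[0] || null; // first scene, if any
    }
    return sceneOrEngine as BABYLON.Scene;
}
```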
@@ -51460,6 +51465,83 @@ declare module BABYLON {
 }
 declare module BABYLON {
     /**
+     * Helper class to render one or more effects
+     */
+    export class EffectRenderer {
+        private engine;
+        private static _Vertices;
+        private static _Indices;
+        private _vertexBuffers;
+        private _indexBuffer;
+        private _ringBufferIndex;
+        private _ringScreenBuffer;
+        private _getNextFrameBuffer;
+        /**
+         * Creates an effect renderer
+         * @param engine the engine to use for rendering
+         */
+        constructor(engine: Engine);
+        /**
+         * renders one or more effects to a specified texture
+         * @param effectWrappers list of effects to render
+         * @param outputTexture texture to draw to, if null it will render to the screen
+         */
+        render(effectWrappers: Array<EffectWrapper> | EffectWrapper, outputTexture?: Nullable<Texture>): void;
+        /**
+         * Disposes of the effect renderer
+         */
+        dispose(): void;
+    }
+    /**
+     * Options to create an EffectWrapper
+     */
+    interface EffectWrapperCreationOptions {
+        /**
+         * Engine to use to create the effect
+         */
+        engine: Engine;
+        /**
+         * Fragment shader for the effect
+         */
+        fragmentShader: string;
+        /**
+         * Attributes to use in the shader
+         */
+        attributeNames: Array<string>;
+        /**
+         * Uniforms to use in the shader
+         */
+        uniformNames: Array<string>;
+        /**
+         * Texture sampler names to use in the shader
+         */
+        samplerNames: Array<string>;
+    }
+    /**
+     * Wraps an effect to be used for rendering
+     */
+    export class EffectWrapper {
+        /**
+         * Event that is fired right before the effect is drawn (should be used to update uniforms)
+         */
+        onApplyObservable: Observable<{}>;
+        /**
+         * The underlying effect
+         */
+        effect: Effect;
+        /**
+         * Creates an effect to be rendered
+         * @param creationOptions options to create the effect
+         */
+        constructor(creationOptions: EffectWrapperCreationOptions);
+        /**
+        * Disposes of the effect wrapper
+        */
+        dispose(): void;
+    }
+}
+declare module BABYLON {
+    /**
      * Helper class to push actions to a pool of workers.
      */
     export class WorkerPool implements IDisposable {
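To show how the new helpers fit together, here is a hedged usage sketch: wrap a fragment shader in an `EffectWrapper`, update its uniforms from `onApplyObservable`, and draw it with an `EffectRenderer`. The shader body, the `intensity` uniform and the assumption that the default vertex stage supplies a `vUV` varying are illustrative, not taken from the commit.

```ts
declare const engine: BABYLON.Engine; // an existing engine

const wrapper = new BABYLON.EffectWrapper({
    engine: engine,
    fragmentShader: `
        varying vec2 vUV;
        uniform float intensity;
        void main(void) {
            gl_FragColor = vec4(vUV * intensity, 0.0, 1.0);
        }`,
    attributeNames: ["position"],
    uniformNames: ["intensity"],
    samplerNames: []
});
wrapper.onApplyObservable.add(() => {
    wrapper.effect.setFloat("intensity", 0.5); // update uniforms right before the draw
});

const effectRenderer = new BABYLON.EffectRenderer(engine);
effectRenderer.render(wrapper); // no output texture given, so it renders to the screen
```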
@@ -57585,6 +57667,12 @@ declare module BABYLON {
          */
         useDefaultLoadingScreen: boolean;
         /**
+         * Gets or sets a boolean defining if the AssetsManager should automatically hide the loading screen
+         * when all assets have been downloaded.
+         * If set to false, you need to manually call hideLoadingUI() once your scene is ready.
+         */
+        autoHideLoadingUI: boolean;
+        /**
          * Creates a new AssetsManager
          * @param scene defines the scene to work on
          */
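A short sketch of the new flag in use: keep the loading screen up until the scene is fully prepared, then hide it manually (the texture task and its url are illustrative; `scene` is an existing scene):

```ts
declare const scene: BABYLON.Scene; // an existing scene

const assetsManager = new BABYLON.AssetsManager(scene);
assetsManager.autoHideLoadingUI = false;                 // keep the loading screen up
assetsManager.addTextureTask("albedo", "textures/albedo.png");
assetsManager.onFinish = () => {
    // ...any additional scene preparation...
    scene.getEngine().hideLoadingUI();                   // hide it manually when ready
};
assetsManager.load();
```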
@@ -58506,19 +58594,23 @@ interface Window {
     DracoDecoderModule: any;
     setImmediate(handler: (...args: any[]) => void): number;
 }
+
 interface HTMLCanvasElement {
     requestPointerLock(): void;
     msRequestPointerLock?(): void;
     mozRequestPointerLock?(): void;
     webkitRequestPointerLock?(): void;
+
     /** Track whether a recording is in progress */
     isRecording: boolean;
     /** Capture Stream method defined by some browsers */
     captureStream(fps?: number): MediaStream;
 }
+
 interface CanvasRenderingContext2D {
     msImageSmoothingEnabled: boolean;
 }
+
 interface MouseEvent {
     mozMovementX: number;
     mozMovementY: number;
@@ -58527,34 +58619,43 @@ interface MouseEvent {
     msMovementX: number;
     msMovementY: number;
 }
+
 interface Navigator {
     mozGetVRDevices: (any: any) => any;
     webkitGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
     mozGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
     msGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
+
     webkitGetGamepads(): Gamepad[];
     msGetGamepads(): Gamepad[];
     webkitGamepads(): Gamepad[];
 }
+
 interface HTMLVideoElement {
     mozSrcObject: any;
 }
+
 interface Math {
     fround(x: number): number;
     imul(a: number, b: number): number;
 }
+
 interface WebGLRenderingContext {
     drawArraysInstanced(mode: number, first: number, count: number, primcount: number): void;
     drawElementsInstanced(mode: number, count: number, type: number, offset: number, primcount: number): void;
     vertexAttribDivisor(index: number, divisor: number): void;
+
     createVertexArray(): any;
     bindVertexArray(vao?: WebGLVertexArrayObject | null): void;
     deleteVertexArray(vao: WebGLVertexArrayObject): void;
+
     blitFramebuffer(srcX0: number, srcY0: number, srcX1: number, srcY1: number, dstX0: number, dstY0: number, dstX1: number, dstY1: number, mask: number, filter: number): void;
     renderbufferStorageMultisample(target: number, samples: number, internalformat: number, width: number, height: number): void;
+
     bindBufferBase(target: number, index: number, buffer: WebGLBuffer | null): void;
     getUniformBlockIndex(program: WebGLProgram, uniformBlockName: string): number;
     uniformBlockBinding(program: WebGLProgram, uniformBlockIndex: number, uniformBlockBinding: number): void;
+
     // Queries
     createQuery(): WebGLQuery;
     deleteQuery(query: WebGLQuery): void;
@@ -58562,11 +58663,13 @@ interface WebGLRenderingContext {
     endQuery(target: number): void;
     getQueryParameter(query: WebGLQuery, pname: number): any;
     getQuery(target: number, pname: number): any;
+
     MAX_SAMPLES: number;
     RGBA8: number;
     READ_FRAMEBUFFER: number;
     DRAW_FRAMEBUFFER: number;
     UNIFORM_BUFFER: number;
+
     HALF_FLOAT_OES: number;
     RGBA16F: number;
     RGBA32F: number;
@@ -58580,23 +58683,29 @@ interface WebGLRenderingContext {
     RG: number;
     R8: number;
     RG8: number;
+
     UNSIGNED_INT_24_8: number;
     DEPTH24_STENCIL8: number;
+
     /* Multiple Render Targets */
     drawBuffers(buffers: number[]): void;
     readBuffer(src: number): void;
+
     readonly COLOR_ATTACHMENT0: number;                             // 0x8CE1
     readonly COLOR_ATTACHMENT1: number;                             // 0x8CE2
     readonly COLOR_ATTACHMENT2: number;                             // 0x8CE3
     readonly COLOR_ATTACHMENT3: number;                             // 0x8CE4
+
     // Occlusion Query
     ANY_SAMPLES_PASSED_CONSERVATIVE: number;
     ANY_SAMPLES_PASSED: number;
     QUERY_RESULT_AVAILABLE: number;
     QUERY_RESULT: number;
 }
+
 interface WebGLProgram {
private __SPECTOR_rebuildProgram?: ((vertexSourceCode: string, fragmentSourceCode: string, onCompiled: (program: WebGLProgram) => void, onError: (message: string) => void) => void) | null;
 }
+
 interface EXT_disjoint_timer_query {
     QUERY_COUNTER_BITS_EXT: number;
     TIME_ELAPSED_EXT: number;
@@ -58611,12 +58720,14 @@ interface EXT_disjoint_timer_query {
     getQueryObjectEXT(query: WebGLQuery, target: number): any;
     deleteQueryEXT(query: WebGLQuery): void;
 }
+
 interface WebGLUniformLocation {
private _currentState: any;
 }
 // Type definitions for WebGL 2, Editor's Draft Fri Feb 24 16:10:18 2017 -0800
 // Project: https://www.khronos.org/registry/webgl/specs/latest/2.0/
 // Definitions by: Nico Kemnitz <https://github.com/nkemnitz/>
 // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+
 interface WebGLRenderingContext {
     readonly RASTERIZER_DISCARD: number;
     readonly DEPTH_COMPONENT24: number;
@@ -58668,10 +58779,13 @@ interface WebGLRenderingContext {
     readonly UNSIGNED_INT_10F_11F_11F_REV: number;
     readonly UNSIGNED_INT_5_9_9_9_REV: number;
     readonly FLOAT_32_UNSIGNED_INT_24_8_REV: number;
+
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ArrayBufferView | null): void;
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ArrayBufferView, offset: number): void;
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ImageBitmap | ImageData | HTMLVideoElement | HTMLImageElement | HTMLCanvasElement): void;
+
     compressedTexImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, data: ArrayBufferView, offset?: number, length?: number): void;
+
     readonly TRANSFORM_FEEDBACK: number;
     readonly INTERLEAVED_ATTRIBS: number;
     readonly TRANSFORM_FEEDBACK_BUFFER: number;
@@ -58681,100 +58795,123 @@ interface WebGLRenderingContext {
     beginTransformFeedback(primitiveMode: number): void;
     endTransformFeedback(): void;
     transformFeedbackVaryings(program: WebGLProgram, varyings: string[], bufferMode: number): void;
+
     clearBufferfv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferiv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferuiv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferfi(buffer: number, drawbuffer: number, depth: number, stencil: number): void;
 }
+
 interface ImageBitmap {
     readonly width: number;
     readonly height: number;
     close(): void;
 }
+
 interface WebGLQuery extends WebGLObject {
 }
+
 declare var WebGLQuery: {
     prototype: WebGLQuery;
     new(): WebGLQuery;
 };
+
 interface WebGLSampler extends WebGLObject {
 }
+
 declare var WebGLSampler: {
     prototype: WebGLSampler;
     new(): WebGLSampler;
 };
+
 interface WebGLSync extends WebGLObject {
 }
+
 declare var WebGLSync: {
     prototype: WebGLSync;
     new(): WebGLSync;
 };
+
 interface WebGLTransformFeedback extends WebGLObject {
 }
+
 declare var WebGLTransformFeedback: {
     prototype: WebGLTransformFeedback;
     new(): WebGLTransformFeedback;
 };
+
 interface WebGLVertexArrayObject extends WebGLObject {
 }
+
 declare var WebGLVertexArrayObject: {
     prototype: WebGLVertexArrayObject;
     new(): WebGLVertexArrayObject;
 };
+
 // Type definitions for WebVR API
 // Project: https://w3c.github.io/webvr/
 // Definitions by: six a <https://github.com/lostfictions>
 // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+
 interface VRDisplay extends EventTarget {
     /**
      * Dictionary of capabilities describing the VRDisplay.
      */
     readonly capabilities: VRDisplayCapabilities;
+
     /**
      * z-depth defining the far plane of the eye view frustum
      * enables mapping of values in the render target depth
      * attachment to scene coordinates. Initially set to 10000.0.
      */
     depthFar: number;
+
     /**
      * z-depth defining the near plane of the eye view frustum
      * enables mapping of values in the render target depth
      * attachment to scene coordinates. Initially set to 0.01.
      */
     depthNear: number;
+
     /**
      * An identifier for this distinct VRDisplay. Used as an
      * association point in the Gamepad API.
      */
     readonly displayId: number;
+
     /**
      * A display name, a user-readable name identifying it.
      */
     readonly displayName: string;
     readonly isConnected: boolean;
     readonly isPresenting: boolean;
+
     /**
      * If this VRDisplay supports room-scale experiences, the optional
      * stage attribute contains details on the room-scale parameters.
      */
     readonly stageParameters: VRStageParameters | null;
+
     /**
      * Passing the value returned by `requestAnimationFrame` to
      * `cancelAnimationFrame` will unregister the callback.
      * @param handle Define the handle of the request to cancel
      */
     cancelAnimationFrame(handle: number): void;
+
     /**
      * Stops presenting to the VRDisplay.
      * @returns a promise to know when it stopped
      */
     exitPresent(): Promise<void>;
+
     /**
      * Return the current VREyeParameters for the given eye.
      * @param whichEye Define the eye we want the parameter for
      * @returns the eye parameters
      */
     getEyeParameters(whichEye: string): VREyeParameters;
+
     /**
      * Populates the passed VRFrameData with the information required to render
      * the current frame.
@@ -58782,11 +58919,13 @@ interface VRDisplay extends EventTarget {
      * @returns true if ok otherwise false
      */
     getFrameData(frameData: VRFrameData): boolean;
+
     /**
      * Get the layers currently being presented.
      * @returns the list of VR layers
      */
     getLayers(): VRLayer[];
+
     /**
      * Return a VRPose containing the future predicted pose of the VRDisplay
      * when the current frame will be presented. The value returned will not
@@ -58797,12 +58936,14 @@ interface VRDisplay extends EventTarget {
      * @returns the pose object
      */
     getPose(): VRPose;
+
     /**
      * Return the current instantaneous pose of the VRDisplay, with no
      * prediction applied.
      * @returns the current instantaneous pose
      */
     getImmediatePose(): VRPose;
+
     /**
      * The callback passed to `requestAnimationFrame` will be called
      * any time a new frame should be rendered. When the VRDisplay is
@@ -58815,6 +58956,7 @@ interface VRDisplay extends EventTarget {
      * @returns the request handle
      */
     requestAnimationFrame(callback: FrameRequestCallback): number;
+
     /**
      * Begin presenting to the VRDisplay. Must be called in response to a user gesture.
      * Repeat calls while already presenting will update the VRLayers being displayed.
@@ -58822,6 +58964,7 @@ interface VRDisplay extends EventTarget {
      * @returns a promise to know when the request has been fulfilled
      */
     requestPresent(layers: VRLayer[]): Promise<void>;
+
     /**
      * Reset the pose for this display, treating its current position and
      * orientation as the "origin/zero" values. VRPose.position,
@@ -58830,6 +58973,7 @@ interface VRDisplay extends EventTarget {
      * sitting-space experiences.
      */
     resetPose(): void;
+
     /**
      * The VRLayer provided to the VRDisplay will be captured and presented
      * in the HMD. Calling this function has the same effect on the source
@@ -58839,15 +58983,18 @@ interface VRDisplay extends EventTarget {
      */
     submitFrame(pose?: VRPose): void;
 }
+
 declare var VRDisplay: {
     prototype: VRDisplay;
     new(): VRDisplay;
 };
+
 interface VRLayer {
     leftBounds?: number[] | Float32Array | null;
     rightBounds?: number[] | Float32Array | null;
     source?: HTMLCanvasElement | null;
 }
+
 interface VRDisplayCapabilities {
     readonly canPresent: boolean;
     readonly hasExternalDisplay: boolean;
@@ -58855,6 +59002,7 @@ interface VRDisplayCapabilities {
     readonly hasPosition: boolean;
     readonly maxLayers: number;
 }
+
 interface VREyeParameters {
     /** @deprecated */
     readonly fieldOfView: VRFieldOfView;
@@ -58862,12 +59010,14 @@ interface VREyeParameters {
     readonly renderHeight: number;
     readonly renderWidth: number;
 }
+
 interface VRFieldOfView {
     readonly downDegrees: number;
     readonly leftDegrees: number;
     readonly rightDegrees: number;
     readonly upDegrees: number;
 }
+
 interface VRFrameData {
     readonly leftProjectionMatrix: Float32Array;
     readonly leftViewMatrix: Float32Array;
@@ -58876,6 +59026,7 @@ interface VRFrameData {
     readonly rightViewMatrix: Float32Array;
     readonly timestamp: number;
 }
+
 interface VRPose {
     readonly angularAcceleration: Float32Array | null;
     readonly angularVelocity: Float32Array | null;
@@ -58885,15 +59036,18 @@ interface VRPose {
     readonly position: Float32Array | null;
     readonly timestamp: number;
 }
+
 interface VRStageParameters {
     sittingToStandingTransform?: Float32Array;
     sizeX?: number;
     sizeY?: number;
 }
+
 interface Navigator {
     getVRDisplays(): Promise<VRDisplay[]>;
     readonly activeVRDisplays: ReadonlyArray<VRDisplay>;
 }
+
 interface Window {
     onvrdisplayconnected: ((this: Window, ev: Event) => any) | null;
     onvrdisplaydisconnected: ((this: Window, ev: Event) => any) | null;
@@ -58902,6 +59056,7 @@ interface Window {
     addEventListener(type: "vrdisplaydisconnected", listener: (ev: Event) => any, useCapture?: boolean): void;
     addEventListener(type: "vrdisplaypresentchange", listener: (ev: Event) => any, useCapture?: boolean): void;
 }
+
 interface Gamepad {
     readonly displayId: number;
 }
@@ -58946,7 +59101,6 @@ declare var XRWebGLLayer: {
     prototype: XRWebGLLayer;
     new(session: XRSession, context?: WebGLRenderingContext): XRWebGLLayer;
 };
-
 declare module BABYLON.GUI {
     /**
      * Class used to specific a value and its associated unit

+ 29 - 8
Tools/Gulp/tasks/gulpTasks-libraries.js

@@ -106,11 +106,24 @@ var buildAMDDTSFiles = function(libraries, settings, cb) {
 /**
  * Append Lose DTS Files allowing isolated Modules build
  */
-var appendLoseDTSFiles = function(settings) {
+var appendLoseDTSFiles = function(settings, moduleFile) {
     if (settings.build.loseDTSFiles) {
-        return gulp.src([config.computed.tempTypingsFilePath, path.join(settings.computed.srcDirectory, settings.build.loseDTSFiles.glob)])
-            .pipe(concat(config.computed.tempTypingsFileName))
-            .pipe(gulp.dest(config.computed.tempFolder));
+        let library = settings.libraries[0];
+        if (!library.preventLoadLibrary) {
+            // Convert Module to Namespace for globals
+            var outputDirectory = settings.computed.distDirectory;
+
+            // Find declaration path.
+            let fileName = settings.build.umd.processDeclaration.filename;
+            if (!moduleFile) {
+                fileName = fileName.replace(".module", "");
+            }
+
+            let fileLocation = path.join(outputDirectory, fileName);
+            return gulp.src([fileLocation, path.join(settings.computed.srcDirectory, settings.build.loseDTSFiles.glob)])
+                .pipe(concat(fileName))
+                .pipe(gulp.dest(outputDirectory));
+        }
     }
     return Promise.resolve();
 }
@@ -139,10 +152,15 @@ var processDTSFiles = function(libraries, settings, cb) {
 
         // Convert Module to Namespace for globals
         if (!commandLineOptions.noNamespace) {
-            processModuleDeclarationToNamespace(fileLocation, settings.build.umd.packageName, settings.build.umd.processDeclaration);
+            processModuleDeclarationToNamespace(fileLocation, settings.build.umd.packageName, settings.build.umd.processDeclaration, cb);
+        }
+        else {
+            cb();
         }
     }
-    cb();
+    else {
+        cb();
+    }
 }
 
 /**
@@ -158,10 +176,13 @@ function buildExternalLibraries(settings) {
     var buildMax = function() { return buildExternalLibrariesMultiEntry(settings.libraries, settings, false) };
 
     var buildAMDDTS = function(cb) { return buildAMDDTSFiles(settings.libraries, settings, cb) };
-    var appendLoseDTS = function() { return appendLoseDTSFiles(settings) };
     var processDTS = function(cb) { return processDTSFiles(settings.libraries, settings, cb) };
+    var appendLoseDTS = [function() { return appendLoseDTSFiles(settings, true) }];
+    if (!commandLineOptions.noNamespace) {
+        appendLoseDTS.push(function() { return appendLoseDTSFiles(settings, false) });
+    }
 
-    tasks.push(cleanup, shaders, buildMin, buildMax, buildAMDDTS, appendLoseDTS, processDTS);
+    tasks.push(cleanup, shaders, buildMin, buildMax, buildAMDDTS, processDTS, ...appendLoseDTS);
 
     return gulp.series.apply(this, tasks);
 }
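The main point of the processDTSFiles change above is that the task now signals completion on every branch. A hedged, self-contained sketch of that callback pattern (the names are invented, not from the build scripts):

```ts
// A gulp callback-style task must call cb exactly once on every code path.
function processDeclarationTask(cb: (err?: Error) => void): void {
    const convertToNamespace = true; // stands in for !commandLineOptions.noNamespace
    if (convertToNamespace) {
        // asynchronous conversion: the helper receives cb and invokes it when done
        setTimeout(() => cb(), 0);
    } else {
        cb(); // nothing to do, but the completion signal is still required
    }
}
```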

+ 2 - 2
Tools/Publisher/tasks/versionNumberManager.js

@@ -11,10 +11,10 @@ const enginePath = path.join(config.core.computed.mainDirectory, "Engines/engine
  * Get the version from the engine class for Babylon
  */
 function getEngineVersion() {
-    colorConsole.log("Get version from engine.ts");
+    colorConsole.log("Get version from engine.ts", enginePath);
     const engineContent = fs.readFileSync(enginePath).toString();
 
-    const versionRegex = new RegExp(`public static get Version\\(\\): string {[\\s\\S]*return "([\\s\\S]*?)";[\\s\\S]*}`, "gm");
+    const versionRegex = new RegExp(`public static get Version\\(\\): string {\\s*return "(\\S*)";\\s*}`, "gm");
     const match = versionRegex.exec(engineContent);
     if (match && match.length) {
         const version = match[1];
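The tightened regular expression can be exercised against a representative engine.ts excerpt (the version string below is made up for illustration):

```ts
const sample = `
    public static get Version(): string {
        return "4.1.0-alpha.0";
    }
`;
const versionRegex = new RegExp(`public static get Version\\(\\): string {\\s*return "(\\S*)";\\s*}`, "gm");
const match = versionRegex.exec(sample);
console.log(match && match[1]); // -> "4.1.0-alpha.0"
```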

+ 378 - 103
dist/preview release/babylon.d.ts

@@ -5030,7 +5030,7 @@ declare module BABYLON {
         /**
          * Add callback functions in this array to update all the requests before they get sent to the network
          */
-        static CustomRequestModifiers: ((request: XMLHttpRequest) => void)[];
+        static CustomRequestModifiers: ((request: XMLHttpRequest, url: string) => void)[];
         private _injectCustomRequestHeaders;
         /**
          * Gets or sets a function to be called when loading progress changes
@@ -18614,7 +18614,7 @@ declare module BABYLON {
          * This represents a texture in babylon. It can be easily loaded from a network, base64 or html input.
          * @see http://doc.babylonjs.com/babylon101/materials#texture
          * @param url define the url of the picture to load as a texture
-         * @param scene define the scene the texture will belong to
+         * @param scene define the scene or engine the texture will belong to
          * @param noMipmap define if the texture will require mip maps or not
          * @param invertY define if the texture needs to be inverted on the y axis during loading
         * @param samplingMode define the sampling mode we want for the texture while fetching from it (Texture.NEAREST_SAMPLINGMODE...)
@@ -18624,7 +18624,7 @@ declare module BABYLON {
          * @param deleteBuffer define if the buffer we are loading the texture from should be deleted after load
          * @param format define the format of the texture we are trying to load (Engine.TEXTUREFORMAT_RGBA...)
          */
-        constructor(url: Nullable<string>, scene: Nullable<Scene>, noMipmap?: boolean, invertY?: boolean, samplingMode?: number, onLoad?: Nullable<() => void>, onError?: Nullable<(message?: string, exception?: any) => void>, buffer?: Nullable<string | ArrayBuffer | HTMLImageElement | Blob>, deleteBuffer?: boolean, format?: number);
+        constructor(url: Nullable<string>, sceneOrEngine: Nullable<Scene | Engine>, noMipmap?: boolean, invertY?: boolean, samplingMode?: number, onLoad?: Nullable<() => void>, onError?: Nullable<(message?: string, exception?: any) => void>, buffer?: Nullable<string | ArrayBuffer | HTMLImageElement | Blob>, deleteBuffer?: boolean, format?: number);
         /**
          * Update the url (and optional buffer) of this texture if url was null during construction.
          * @param url the url of the texture
@@ -28093,6 +28093,11 @@ declare module BABYLON {
          */
         readonly webGLVersion: number;
         /**
+         * Gets a string identifying the name of the class
+         * @returns "Engine" string
+         */
+        getClassName(): string;
+        /**
          * Returns true if the stencil buffer has been enabled through the creation option of the context.
          */
         readonly isStencilEnable: boolean;
@@ -40466,7 +40471,7 @@ declare module BABYLON {
 }
 declare module BABYLON {
     /**
-     * Manages an XRSession
+     * Manages an XRSession to work with Babylon's engine
      * @see https://doc.babylonjs.com/how_to/webxr
      */
     export class WebXRSessionManager implements IDisposable {
@@ -40479,17 +40484,22 @@ declare module BABYLON {
          * Fires when the xr session is ended either by the device or manually done
          */
         onXRSessionEnded: Observable<any>;
-        /** @hidden */
-        _xrSession: XRSession;
-        /** @hidden */
-        _frameOfReference: XRFrameOfReference;
+        /**
+         * Underlying xr session
+         */
+        session: XRSession;
+        /**
+         * Type of reference space used when creating the session
+         */
+        referenceSpace: XRReferenceSpace;
         /** @hidden */
         _sessionRenderTargetTexture: Nullable<RenderTargetTexture>;
-        /** @hidden */
-        _currentXRFrame: Nullable<XRFrame>;
+        /**
+         * Current XR frame
+         */
+        currentFrame: Nullable<XRFrame>;
         private _xrNavigator;
-        private _xrDevice;
-        private _tmpMatrix;
+        private baseLayer;
         /**
          * Constructs a WebXRSessionManager, this must be initialized within a user action before usage
          * @param scene The scene which the session should be created for
@@ -40502,36 +40512,46 @@ declare module BABYLON {
          */
         initializeAsync(): Promise<void>;
         /**
-         * Enters XR with the desired XR session options, this must be done with a user action (eg. button click event)
-         * @param sessionCreationOptions xr options to create the session with
-         * @param frameOfReferenceType option to configure how the xr pose is expressed
-         * @returns Promise which resolves after it enters XR
+         * Initializes an xr session
+         * @param xrSessionMode mode to initialize
+         * @returns a promise which will resolve once the session has been initialized
+         */
+        initializeSessionAsync(xrSessionMode: XRSessionMode): any;
+        /**
+         * Sets the reference space on the xr session
+         * @param referenceSpace space to set
+         * @returns a promise that will resolve once the reference space has been set
+         */
+        setReferenceSpaceAsync(referenceSpace: XRReferenceSpaceType): Promise<void>;
+        /**
+         * Updates the render state of the session
+         * @param state state to set
+         * @returns a promise that resolves once the render state has been updated
+         */
+        updateRenderStateAsync(state: any): Promise<void>;
+        /**
+         * Starts rendering to the xr layer
+         * @returns a promise that will resolve once rendering has started
          */
-        enterXRAsync(sessionCreationOptions: XRSessionCreationOptions, frameOfReferenceType: string): Promise<void>;
+        startRenderingToXRAsync(): Promise<void>;
         /**
          * Stops the xrSession and restores the renderloop
          * @returns Promise which resolves after it exits XR
          */
         exitXRAsync(): Promise<void>;
         /**
-         * Fires a ray and returns the closest hit in the xr sessions enviornment, useful to place objects in AR
-         * @param ray ray to cast into the environment
-         * @returns Promise which resolves with a collision point in the environment if it exists
-         */
-        environmentPointHitTestAsync(ray: Ray): Promise<Nullable<Vector3>>;
-        /**
          * Checks if a session would be supported for the creation options specified
-         * @param options creation options to check if they are supported
+         * @param sessionMode session mode to check if supported eg. immersive-vr
          * @returns true if supported
          */
-        supportsSessionAsync(options: XRSessionCreationOptions): Promise<boolean>;
+        supportsSessionAsync(sessionMode: XRSessionMode): any;
         /**
          * @hidden
          * Converts the render layer of xrSession to a render target
          * @param session session to create render target for
          * @param scene scene the new render target should be created for
          */
-        static _CreateRenderTargetTextureFromSession(session: XRSession, scene: Scene): RenderTargetTexture;
+        static _CreateRenderTargetTextureFromSession(session: XRSession, scene: Scene, baseLayer: XRWebGLLayer): RenderTargetTexture;
         /**
          * Disposes of the session manager
          */
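A hedged sketch of the reworked low-level flow (normally driven by `WebXRExperienceHelper`): the session mode and reference-space strings follow the WebXR spec, and the canvas/layer wiring shown here is an assumption.

```ts
async function startXR(scene: BABYLON.Scene, outputCanvas: BABYLON.WebXRManagedOutputCanvas) {
    const sessionManager = new BABYLON.WebXRSessionManager(scene);
    await sessionManager.initializeAsync();                       // set up the manager
    await sessionManager.initializeSessionAsync("immersive-vr");  // create the XRSession
    await sessionManager.setReferenceSpaceAsync("local-floor");   // choose the reference space
    await sessionManager.updateRenderStateAsync({ baseLayer: outputCanvas.xrLayer });
    await sessionManager.startRenderingToXRAsync();               // hand the render loop to XR
    return sessionManager;
}
```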
@@ -40564,6 +40584,42 @@ declare module BABYLON {
 }
 declare module BABYLON {
     /**
+     * Creates a canvas that is added/removed from the webpage when entering/exiting XR
+     */
+    export class WebXRManagedOutputCanvas implements IDisposable {
+        private helper;
+        private _canvas;
+        /**
+         * xrpresent context of the canvas which can be used to display/mirror xr content
+         */
+        canvasContext: WebGLRenderingContext;
+        /**
+         * xr layer for the canvas
+         */
+        xrLayer: Nullable<XRWebGLLayer>;
+        /**
+         * Initializes the xr layer for the session
+         * @param xrSession xr session
+         * @returns a promise that will resolve once the XR Layer has been created
+         */
+        initializeXRLayerAsync(xrSession: any): any;
+        /**
+         * Initializes the canvas to be added/removed upon entering/exiting xr
+         * @param helper the xr experience helper used to trigger adding/removing of the canvas
+         * @param canvas The canvas to be added/removed (If not specified a full screen canvas will be created)
+         */
+        constructor(helper: WebXRExperienceHelper, canvas?: HTMLCanvasElement);
+        /**
+         * Disposes of the object
+         */
+        dispose(): void;
+        private _setManagedOutputCanvas;
+        private _addCanvas;
+        private _removeCanvas;
+    }
+}
+declare module BABYLON {
+    /**
      * States of the webXR experience
      */
     export enum WebXRState {
@@ -40608,8 +40664,8 @@ declare module BABYLON {
          * Fires when the state of the experience helper has changed
          */
         onStateChangedObservable: Observable<WebXRState>;
-        /** @hidden */
-        _sessionManager: WebXRSessionManager;
+        /** Session manager used to keep track of xr session */
+        sessionManager: WebXRSessionManager;
         private _nonVRCamera;
         private _originalSceneAutoClear;
         private _supported;
@@ -40632,16 +40688,11 @@ declare module BABYLON {
         /**
          * Enters XR mode (This must be done within a user interaction in most browsers eg. button click)
          * @param sessionCreationOptions options for the XR session
-         * @param frameOfReference frame of reference of the XR session
+         * @param referenceSpaceType frame of reference of the XR session
+         * @param outputCanvas the output canvas that will be used to enter XR mode
          * @returns promise that resolves after xr mode has entered
          */
-        enterXRAsync(sessionCreationOptions: XRSessionCreationOptions, frameOfReference: string): Promise<void>;
-        /**
-         * Fires a ray and returns the closest hit in the xr sessions enviornment, useful to place objects in AR
-         * @param ray ray to cast into the environment
-         * @returns Promise which resolves with a collision point in the environment if it exists
-         */
-        environmentPointHitTestAsync(ray: Ray): Promise<Nullable<Vector3>>;
+        enterXRAsync(sessionCreationOptions: XRSessionMode, referenceSpaceType: XRReferenceSpaceType, outputCanvas: WebXRManagedOutputCanvas): any;
         /**
          * Updates the global position of the camera by moving the camera's container
          * This should be used instead of modifying the camera's position as it will be overwritten by an xrSessions's update frame
@@ -40655,12 +40706,6 @@ declare module BABYLON {
          */
         rotateCameraByQuaternionUsingContainer(rotation: Quaternion): void;
         /**
-         * Checks if the creation options are supported by the xr session
-         * @param options creation options
-         * @returns true if supported
-         */
-        supportsSessionAsync(options: XRSessionCreationOptions): Promise<boolean>;
-        /**
          * Disposes of the experience helper
          */
         dispose(): void;
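Putting the pieces together, entering XR through the helper now looks roughly like the sketch below. It assumes the helper is created through a `CreateAsync` factory (not shown in this hunk) and that this is done from a user gesture, as the comment above requires.

```ts
async function enterVR(scene: BABYLON.Scene) {
    const xrHelper = await BABYLON.WebXRExperienceHelper.CreateAsync(scene); // assumed factory
    const xrCanvas = new BABYLON.WebXRManagedOutputCanvas(xrHelper);         // full-screen canvas by default
    await xrHelper.enterXRAsync("immersive-vr", "local-floor", xrCanvas);
    return xrHelper;
}
```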
@@ -40674,17 +40719,22 @@ declare module BABYLON {
         /** button element */
         element: HTMLElement;
         /** XR initialization options for the button */
-        initializationOptions: XRSessionCreationOptions;
+        sessionMode: XRSessionMode;
+        /** Reference space type */
+        referenceSpaceType: XRReferenceSpaceType;
         /**
          * Creates a WebXREnterExitUIButton
          * @param element button element
-         * @param initializationOptions XR initialization options for the button
+         * @param sessionMode XR initialization session mode
+         * @param referenceSpaceType the type of reference space to be used
          */
         constructor(
         /** button element */
         element: HTMLElement, 
         /** XR initialization options for the button */
-        initializationOptions: XRSessionCreationOptions);
+        sessionMode: XRSessionMode, 
+        /** Reference space type */
+        referenceSpaceType: XRReferenceSpaceType);
         /**
          * Overwritable function which can be used to update the button's visuals when the state changes
          * @param activeButton the current active button in the UI
@@ -40698,7 +40748,7 @@ declare module BABYLON {
         /**
          * Context to enter xr with
          */
-        outputCanvasContext?: Nullable<WebGLRenderingContext>;
+        webXRManagedOutputCanvas?: Nullable<WebXRManagedOutputCanvas>;
         /**
          * User provided buttons to enable/disable WebXR. The system will provide default if not set
          */
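A custom enter/exit button is now built from a session mode plus a reference-space type rather than a single options object, roughly as in this sketch (element creation and label are illustrative):

```ts
const buttonElement = document.createElement("button");
buttonElement.textContent = "Enter VR";
const vrButton = new BABYLON.WebXREnterExitUIButton(buttonElement, "immersive-vr", "local-floor");
```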
@@ -40741,6 +40791,10 @@ declare module BABYLON {
      * Represents an XR input
      */
     export class WebXRController {
+        private scene;
+        /** The underlying input source for the controller  */
+        inputSource: XRInputSource;
+        private parentContainer;
         /**
+         * Represents the part of the controller that is held. This may not exist if the controller is the head mounted display itself; if that's the case, only the pointer from the head will be available
          */
@@ -40749,12 +40803,23 @@ declare module BABYLON {
          * Pointer which can be used to select objects or attach a visible laser to
          */
         pointer: AbstractMesh;
+        private _tmpMatrix;
         /**
          * Creates the controller
          * @see https://doc.babylonjs.com/how_to/webxr
          * @param scene the scene which the controller should be associated to
+         * @param inputSource the underlying input source for the controller
+         * @param parentContainer parent that the controller meshes should be children of
          */
-        constructor(scene: Scene);
+        constructor(scene: Scene, 
+        /** The underlying input source for the controller  */
+        inputSource: XRInputSource, parentContainer?: Nullable<AbstractMesh>);
+        /**
+         * Updates the controller pose based on the given XRFrame
+         * @param xrFrame xr frame to update the pose with
+         * @param referenceSpace reference space to use
+         */
+        updateFromXRFrame(xrFrame: XRFrame, referenceSpace: XRReferenceSpace): void;
         /**
          * Disposes of the object
          */
@@ -40769,42 +40834,26 @@ declare module BABYLON {
          * XR controllers being tracked
          */
         controllers: Array<WebXRController>;
-        private _tmpMatrix;
         private _frameObserver;
         /**
-         * Initializes the WebXRInput
-         * @param helper experience helper which the input should be created for
+         * Event when a controller has been connected/added
          */
-        constructor(helper: WebXRExperienceHelper);
+        onControllerAddedObservable: Observable<WebXRController>;
         /**
-         * Disposes of the object
+         * Event when a controller has been removed/disconnected
          */
-        dispose(): void;
-    }
-}
-declare module BABYLON {
-    /**
-     * Creates a canvas that is added/removed from the webpage when entering/exiting XR
-     */
-    export class WebXRManagedOutputCanvas implements IDisposable {
-        private _canvas;
+        onControllerRemovedObservable: Observable<WebXRController>;
         /**
-         * xrpresent context of the canvas which can be used to display/mirror xr content
-         */
-        canvasContext: Nullable<WebGLRenderingContext>;
-        /**
-         * Initializes the canvas to be added/removed upon entering/exiting xr
-         * @param helper the xr experience helper used to trigger adding/removing of the canvas
-         * @param canvas The canvas to be added/removed (If not specified a full screen canvas will be created)
+         * Initializes the WebXRInput
+         * @param helper experience helper which the input should be created for
          */
-        constructor(helper: WebXRExperienceHelper, canvas?: HTMLCanvasElement);
+        constructor(helper: WebXRExperienceHelper);
+        private _onInputSourcesChange;
+        private _addAndRemoveControllers;
         /**
          * Disposes of the object
          */
         dispose(): void;
-        private _setManagedOutputCanvas;
-        private _addCanvas;
-        private _removeCanvas;
     }
 }
 declare module BABYLON {
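Controller tracking is now event driven. A hedged sketch of consuming the new observables (`xrHelper` is an existing `WebXRExperienceHelper`; the `handedness` field comes from the WebXR input-source spec):

```ts
declare const xrHelper: BABYLON.WebXRExperienceHelper;

const xrInput = new BABYLON.WebXRInput(xrHelper);
xrInput.onControllerAddedObservable.add((controller) => {
    console.log("controller connected:", controller.inputSource.handedness);
    controller.pointer.isVisible = true;   // e.g. show the controller's pointer mesh
});
xrInput.onControllerRemovedObservable.add(() => {
    console.log("controller disconnected");
});
```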
@@ -52261,6 +52310,83 @@ declare module BABYLON {
 }
 declare module BABYLON {
     /**
+     * Helper class to render one or more effects
+     */
+    export class EffectRenderer {
+        private engine;
+        private static _Vertices;
+        private static _Indices;
+        private _vertexBuffers;
+        private _indexBuffer;
+        private _ringBufferIndex;
+        private _ringScreenBuffer;
+        private _getNextFrameBuffer;
+        /**
+         * Creates an effect renderer
+         * @param engine the engine to use for rendering
+         */
+        constructor(engine: Engine);
+        /**
+         * renders one or more effects to a specified texture
+         * @param effectWrappers list of effects to render
+         * @param outputTexture texture to draw to, if null it will render to the screen
+         */
+        render(effectWrappers: Array<EffectWrapper> | EffectWrapper, outputTexture?: Nullable<Texture>): void;
+        /**
+         * Disposes of the effect renderer
+         */
+        dispose(): void;
+    }
+    /**
+     * Options to create an EffectWrapper
+     */
+    interface EffectWrapperCreationOptions {
+        /**
+         * Engine to use to create the effect
+         */
+        engine: Engine;
+        /**
+         * Fragment shader for the effect
+         */
+        fragmentShader: string;
+        /**
+         * Attributes to use in the shader
+         */
+        attributeNames: Array<string>;
+        /**
+         * Uniforms to use in the shader
+         */
+        uniformNames: Array<string>;
+        /**
+         * Texture sampler names to use in the shader
+         */
+        samplerNames: Array<string>;
+    }
+    /**
+     * Wraps an effect to be used for rendering
+     */
+    export class EffectWrapper {
+        /**
+         * Event that is fired right before the effect is drawn (should be used to update uniforms)
+         */
+        onApplyObservable: Observable<{}>;
+        /**
+         * The underlying effect
+         */
+        effect: Effect;
+        /**
+         * Creates an effect to be rendered
+         * @param creationOptions options to create the effect
+         */
+        constructor(creationOptions: EffectWrapperCreationOptions);
+        /**
+        * Disposes of the effect wrapper
+        */
+        dispose(): void;
+    }
+}
+declare module BABYLON {
+    /**
      * Helper class to push actions to a pool of workers.
      */
     export class WorkerPool implements IDisposable {
@@ -58441,6 +58567,12 @@ declare module BABYLON {
          */
         useDefaultLoadingScreen: boolean;
         /**
+         * Gets or sets a boolean defining if the AssetsManager should automatically hide the loading screen
+         * when all assets have been downloaded.
+         * If set to false, you need to manually call hideLoadingUI() once your scene is ready.
+         */
+        autoHideLoadingUI: boolean;
+        /**
          * Creates a new AssetsManager
          * @param scene defines the scene to work on
          */
@@ -59362,19 +59494,23 @@ interface Window {
     DracoDecoderModule: any;
     setImmediate(handler: (...args: any[]) => void): number;
 }
+
 interface HTMLCanvasElement {
     requestPointerLock(): void;
     msRequestPointerLock?(): void;
     mozRequestPointerLock?(): void;
     webkitRequestPointerLock?(): void;
+
     /** Track whether a recording is in progress */
     isRecording: boolean;
     /** Capture Stream method defined by some browsers */
     captureStream(fps?: number): MediaStream;
 }
+
 interface CanvasRenderingContext2D {
     msImageSmoothingEnabled: boolean;
 }
+
 interface MouseEvent {
     mozMovementX: number;
     mozMovementY: number;
@@ -59383,34 +59519,43 @@ interface MouseEvent {
     msMovementX: number;
     msMovementY: number;
 }
+
 interface Navigator {
     mozGetVRDevices: (any: any) => any;
     webkitGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
     mozGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
     msGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
+
     webkitGetGamepads(): Gamepad[];
     msGetGamepads(): Gamepad[];
     webkitGamepads(): Gamepad[];
 }
+
 interface HTMLVideoElement {
     mozSrcObject: any;
 }
+
 interface Math {
     fround(x: number): number;
     imul(a: number, b: number): number;
 }
+
 interface WebGLRenderingContext {
     drawArraysInstanced(mode: number, first: number, count: number, primcount: number): void;
     drawElementsInstanced(mode: number, count: number, type: number, offset: number, primcount: number): void;
     vertexAttribDivisor(index: number, divisor: number): void;
+
     createVertexArray(): any;
     bindVertexArray(vao?: WebGLVertexArrayObject | null): void;
     deleteVertexArray(vao: WebGLVertexArrayObject): void;
+
     blitFramebuffer(srcX0: number, srcY0: number, srcX1: number, srcY1: number, dstX0: number, dstY0: number, dstX1: number, dstY1: number, mask: number, filter: number): void;
     renderbufferStorageMultisample(target: number, samples: number, internalformat: number, width: number, height: number): void;
+
     bindBufferBase(target: number, index: number, buffer: WebGLBuffer | null): void;
     getUniformBlockIndex(program: WebGLProgram, uniformBlockName: string): number;
     uniformBlockBinding(program: WebGLProgram, uniformBlockIndex: number, uniformBlockBinding: number): void;
+
     // Queries
     createQuery(): WebGLQuery;
     deleteQuery(query: WebGLQuery): void;
@@ -59418,11 +59563,13 @@ interface WebGLRenderingContext {
     endQuery(target: number): void;
     getQueryParameter(query: WebGLQuery, pname: number): any;
     getQuery(target: number, pname: number): any;
+
     MAX_SAMPLES: number;
     RGBA8: number;
     READ_FRAMEBUFFER: number;
     DRAW_FRAMEBUFFER: number;
     UNIFORM_BUFFER: number;
+
     HALF_FLOAT_OES: number;
     RGBA16F: number;
     RGBA32F: number;
@@ -59436,24 +59583,30 @@ interface WebGLRenderingContext {
     RG: number;
     R8: number;
     RG8: number;
+
     UNSIGNED_INT_24_8: number;
     DEPTH24_STENCIL8: number;
+
     /* Multiple Render Targets */
     drawBuffers(buffers: number[]): void;
     readBuffer(src: number): void;
+
     readonly COLOR_ATTACHMENT0: number;                             // 0x8CE1
     readonly COLOR_ATTACHMENT1: number;                             // 0x8CE2
     readonly COLOR_ATTACHMENT2: number;                             // 0x8CE3
     readonly COLOR_ATTACHMENT3: number;                             // 0x8CE4
+
     // Occlusion Query
     ANY_SAMPLES_PASSED_CONSERVATIVE: number;
     ANY_SAMPLES_PASSED: number;
     QUERY_RESULT_AVAILABLE: number;
     QUERY_RESULT: number;
 }
+
 interface WebGLProgram {
     __SPECTOR_rebuildProgram?: ((vertexSourceCode: string, fragmentSourceCode: string, onCompiled: (program: WebGLProgram) => void, onError: (message: string) => void) => void) | null;
 }
+
 interface EXT_disjoint_timer_query {
     QUERY_COUNTER_BITS_EXT: number;
     TIME_ELAPSED_EXT: number;
@@ -59468,6 +59621,7 @@ interface EXT_disjoint_timer_query {
     getQueryObjectEXT(query: WebGLQuery, target: number): any;
     deleteQueryEXT(query: WebGLQuery): void;
 }
+
 interface WebGLUniformLocation {
     _currentState: any;
 }
@@ -59475,6 +59629,7 @@ interface WebGLUniformLocation {
 // Project: https://www.khronos.org/registry/webgl/specs/latest/2.0/
 // Definitions by: Nico Kemnitz <https://github.com/nkemnitz/>
 // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+
 interface WebGLRenderingContext {
     readonly RASTERIZER_DISCARD: number;
     readonly DEPTH_COMPONENT24: number;
@@ -59526,10 +59681,13 @@ interface WebGLRenderingContext {
     readonly UNSIGNED_INT_10F_11F_11F_REV: number;
     readonly UNSIGNED_INT_5_9_9_9_REV: number;
     readonly FLOAT_32_UNSIGNED_INT_24_8_REV: number;
+
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ArrayBufferView | null): void;
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ArrayBufferView, offset: number): void;
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ImageBitmap | ImageData | HTMLVideoElement | HTMLImageElement | HTMLCanvasElement): void;
+
     compressedTexImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, data: ArrayBufferView, offset?: number, length?: number): void;
+
     readonly TRANSFORM_FEEDBACK: number;
     readonly INTERLEAVED_ATTRIBS: number;
     readonly TRANSFORM_FEEDBACK_BUFFER: number;
@@ -59539,100 +59697,123 @@ interface WebGLRenderingContext {
     beginTransformFeedback(primitiveMode: number): void;
     endTransformFeedback(): void;
     transformFeedbackVaryings(program: WebGLProgram, varyings: string[], bufferMode: number): void;
+
     clearBufferfv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferiv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferuiv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferfi(buffer: number, drawbuffer: number, depth: number, stencil: number): void;
 }
+
 interface ImageBitmap {
     readonly width: number;
     readonly height: number;
     close(): void;
 }
+
 interface WebGLQuery extends WebGLObject {
 }
+
 declare var WebGLQuery: {
     prototype: WebGLQuery;
     new(): WebGLQuery;
 };
+
 interface WebGLSampler extends WebGLObject {
 }
+
 declare var WebGLSampler: {
     prototype: WebGLSampler;
     new(): WebGLSampler;
 };
+
 interface WebGLSync extends WebGLObject {
 }
+
 declare var WebGLSync: {
     prototype: WebGLSync;
     new(): WebGLSync;
 };
+
 interface WebGLTransformFeedback extends WebGLObject {
 }
+
 declare var WebGLTransformFeedback: {
     prototype: WebGLTransformFeedback;
     new(): WebGLTransformFeedback;
 };
+
 interface WebGLVertexArrayObject extends WebGLObject {
 }
+
 declare var WebGLVertexArrayObject: {
     prototype: WebGLVertexArrayObject;
     new(): WebGLVertexArrayObject;
 };
+
 // Type definitions for WebVR API
 // Project: https://w3c.github.io/webvr/
 // Definitions by: six a <https://github.com/lostfictions>
 // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+
 interface VRDisplay extends EventTarget {
     /**
      * Dictionary of capabilities describing the VRDisplay.
      */
     readonly capabilities: VRDisplayCapabilities;
+
     /**
      * z-depth defining the far plane of the eye view frustum
      * enables mapping of values in the render target depth
      * attachment to scene coordinates. Initially set to 10000.0.
      */
     depthFar: number;
+
     /**
      * z-depth defining the near plane of the eye view frustum
      * enables mapping of values in the render target depth
      * attachment to scene coordinates. Initially set to 0.01.
      */
     depthNear: number;
+
     /**
      * An identifier for this distinct VRDisplay. Used as an
      * association point in the Gamepad API.
      */
     readonly displayId: number;
+
     /**
      * A display name, a user-readable name identifying it.
      */
     readonly displayName: string;
     readonly isConnected: boolean;
     readonly isPresenting: boolean;
+
     /**
      * If this VRDisplay supports room-scale experiences, the optional
      * stage attribute contains details on the room-scale parameters.
      */
     readonly stageParameters: VRStageParameters | null;
+
     /**
      * Passing the value returned by `requestAnimationFrame` to
      * `cancelAnimationFrame` will unregister the callback.
      * @param handle Define the handle of the request to cancel
      */
     cancelAnimationFrame(handle: number): void;
+
     /**
      * Stops presenting to the VRDisplay.
      * @returns a promise to know when it stopped
      */
     exitPresent(): Promise<void>;
+
     /**
      * Return the current VREyeParameters for the given eye.
      * @param whichEye Define the eye we want the parameter for
      * @returns the eye parameters
      */
     getEyeParameters(whichEye: string): VREyeParameters;
+
     /**
      * Populates the passed VRFrameData with the information required to render
      * the current frame.
@@ -59640,11 +59821,13 @@ interface VRDisplay extends EventTarget {
      * @returns true if the frame data was successfully populated, otherwise false
      */
     getFrameData(frameData: VRFrameData): boolean;
+
     /**
      * Get the layers currently being presented.
      * @returns the list of VR layers
      */
     getLayers(): VRLayer[];
+
     /**
      * Return a VRPose containing the future predicted pose of the VRDisplay
      * when the current frame will be presented. The value returned will not
@@ -59655,12 +59838,14 @@ interface VRDisplay extends EventTarget {
      * @returns the pose object
      */
     getPose(): VRPose;
+
     /**
      * Return the current instantaneous pose of the VRDisplay, with no
      * prediction applied.
      * @returns the current instantaneous pose
      */
     getImmediatePose(): VRPose;
+
     /**
      * The callback passed to `requestAnimationFrame` will be called
      * any time a new frame should be rendered. When the VRDisplay is
@@ -59673,6 +59858,7 @@ interface VRDisplay extends EventTarget {
      * @returns the request handle
      */
     requestAnimationFrame(callback: FrameRequestCallback): number;
+
     /**
      * Begin presenting to the VRDisplay. Must be called in response to a user gesture.
      * Repeat calls while already presenting will update the VRLayers being displayed.
@@ -59680,6 +59866,7 @@ interface VRDisplay extends EventTarget {
      * @returns a promise to know when the request has been fulfilled
      */
     requestPresent(layers: VRLayer[]): Promise<void>;
+
     /**
      * Reset the pose for this display, treating its current position and
      * orientation as the "origin/zero" values. VRPose.position,
@@ -59688,6 +59875,7 @@ interface VRDisplay extends EventTarget {
      * sitting-space experiences.
      */
     resetPose(): void;
+
     /**
      * The VRLayer provided to the VRDisplay will be captured and presented
      * in the HMD. Calling this function has the same effect on the source
@@ -59697,15 +59885,18 @@ interface VRDisplay extends EventTarget {
      */
     submitFrame(pose?: VRPose): void;
 }
+
 declare var VRDisplay: {
     prototype: VRDisplay;
     new(): VRDisplay;
 };
+
 interface VRLayer {
     leftBounds?: number[] | Float32Array | null;
     rightBounds?: number[] | Float32Array | null;
     source?: HTMLCanvasElement | null;
 }
+
 interface VRDisplayCapabilities {
     readonly canPresent: boolean;
     readonly hasExternalDisplay: boolean;
@@ -59713,6 +59904,7 @@ interface VRDisplayCapabilities {
     readonly hasPosition: boolean;
     readonly maxLayers: number;
 }
+
 interface VREyeParameters {
     /** @deprecated */
     readonly fieldOfView: VRFieldOfView;
@@ -59720,12 +59912,14 @@ interface VREyeParameters {
     readonly renderHeight: number;
     readonly renderWidth: number;
 }
+
 interface VRFieldOfView {
     readonly downDegrees: number;
     readonly leftDegrees: number;
     readonly rightDegrees: number;
     readonly upDegrees: number;
 }
+
 interface VRFrameData {
     readonly leftProjectionMatrix: Float32Array;
     readonly leftViewMatrix: Float32Array;
@@ -59734,6 +59928,7 @@ interface VRFrameData {
     readonly rightViewMatrix: Float32Array;
     readonly timestamp: number;
 }
+
 interface VRPose {
     readonly angularAcceleration: Float32Array | null;
     readonly angularVelocity: Float32Array | null;
@@ -59743,15 +59938,18 @@ interface VRPose {
     readonly position: Float32Array | null;
     readonly timestamp: number;
 }
+
 interface VRStageParameters {
     sittingToStandingTransform?: Float32Array;
     sizeX?: number;
     sizeY?: number;
 }
+
 interface Navigator {
     getVRDisplays(): Promise<VRDisplay[]>;
     readonly activeVRDisplays: ReadonlyArray<VRDisplay>;
 }
+
 interface Window {
     onvrdisplayconnected: ((this: Window, ev: Event) => any) | null;
     onvrdisplaydisconnected: ((this: Window, ev: Event) => any) | null;
@@ -59760,47 +59958,124 @@ interface Window {
     addEventListener(type: "vrdisplaydisconnected", listener: (ev: Event) => any, useCapture?: boolean): void;
     addEventListener(type: "vrdisplaypresentchange", listener: (ev: Event) => any, useCapture?: boolean): void;
 }
+
 interface Gamepad {
     readonly displayId: number;
 }
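
A minimal sketch of how the WebVR declarations above are typically consumed: obtain a VRDisplay (e.g. via navigator.getVRDisplays()), start presenting, then poll getFrameData and submit each frame. The canvas, the display discovery and the global VRFrameData constructor are assumptions taken from the WebVR spec, not part of this declaration file.

    // Hedged sketch: drive a presentation loop from a VRDisplay.
    function runVRLoop(display: VRDisplay, canvas: HTMLCanvasElement): void {
        const frameData = new VRFrameData();                      // assumes the spec's VRFrameData constructor
        display.requestPresent([{ source: canvas }]).then(() => {
            const onFrame = () => {
                if (display.getFrameData(frameData)) {
                    // frameData.leftViewMatrix / leftProjectionMatrix (and the right-eye
                    // equivalents) feed the per-eye rendering here
                }
                display.submitFrame();
                display.requestAnimationFrame(onFrame);
            };
            display.requestAnimationFrame(onFrame);
        });
    }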
-interface XRDevice {
-    requestSession(options: XRSessionCreationOptions): Promise<XRSession>;
-    supportsSession(options: XRSessionCreationOptions): Promise<void>;
-}
+type XRSessionMode =
+    | "inline"
+    | "immersive-vr"
+    | "immersive-ar";
+
+type XRReferenceSpaceType =
+    | "viewer"
+    | "local"
+    | "local-floor"
+    | "bounded-floor"
+    | "unbounded";
+
+type XREnvironmentBlendMode =
+    | "opaque"
+    | "additive"
+    | "alpha-blend";
+
+type XRVisibilityState =
+    | "visible"
+    | "visible-blurred"
+    | "hidden";
+
+type XRHandedness =
+    | "none"
+    | "left"
+    | "right";
+
+type XRTargetRayMode =
+    | "gaze"
+    | "tracked-pointer"
+    | "screen";
+
+type XREye =
+    | "none"
+    | "left"
+    | "right";
+
+interface XRSpace extends EventTarget {
+
+}
+
+interface XRRenderState {
+    depthNear: number;
+    depthFar: number;
+    inlineVerticalFieldOfView: number | undefined;
+    baseLayer: XRWebGLLayer | undefined;
+}
+
+interface XRInputSource {
+    handedness: XRHandedness;
+    targetRayMode: XRTargetRayMode;
+    targetRaySpace: XRSpace;
+    gripSpace: XRSpace | undefined;
+    gamepad: Gamepad | undefined;
+    profiles: Array<string>;
+}
+
 interface XRSession {
-    getInputSources(): Array<any>;
-    baseLayer: XRWebGLLayer;
-    requestFrameOfReference(type: string): Promise<void>;
-    requestHitTest(origin: Float32Array, direction: Float32Array, frameOfReference: any): any;
-    end(): Promise<void>;
-    requestAnimationFrame: Function;
     addEventListener: Function;
+    requestReferenceSpace(type: XRReferenceSpaceType): Promise<XRReferenceSpace>;
+    updateRenderState(XRRenderStateInit: any): Promise<void>;
+    requestAnimationFrame: Function;
+    end(): Promise<void>;
+    renderState: XRRenderState;
+    inputSources: Array<XRInputSource>;
+
 }
-interface XRSessionCreationOptions {
-    outputContext?: WebGLRenderingContext | null;
-    immersive?: boolean;
-    environmentIntegration?: boolean;
-}
-interface XRLayer {
-    getViewport: Function;
-    framebufferWidth: number;
-    framebufferHeight: number;
-}
-interface XRView {
-    projectionMatrix: Float32Array;
+
+interface XRReferenceSpace extends XRSpace {
+    getOffsetReferenceSpace(originOffset: XRRigidTransform): XRReferenceSpace;
+    onreset: any;
 }
+
 interface XRFrame {
-    getDevicePose: Function;
-    getInputPose: Function;
-    views: Array<XRView>;
-    baseLayer: XRLayer;
+    session: XRSession;
+    getViewerPose(referenceSpace: XRReferenceSpace): XRViewerPose | undefined;
+    getPose(space: XRSpace, baseSpace: XRSpace): XRPose | undefined;
 }
-interface XRFrameOfReference {
+
+interface XRViewerPose extends XRPose {
+    views: Array<XRView>;
 }
-interface XRWebGLLayer extends XRLayer {
-    framebuffer: WebGLFramebuffer;
+
+interface XRPose {
+    transform: XRRigidTransform;
+    emulatedPosition: boolean;
 }
+
 declare var XRWebGLLayer: {
     prototype: XRWebGLLayer;
-    new(session: XRSession, context?: WebGLRenderingContext): XRWebGLLayer;
-};
+    new(session: XRSession, context: WebGLRenderingContext | undefined): XRWebGLLayer;
+};
+interface XRWebGLLayer {
+    framebuffer: WebGLFramebuffer;
+    framebufferWidth: number;
+    framebufferHeight: number;
+    getViewport: Function;
+}
+
+interface XRRigidTransform {
+    position: DOMPointReadOnly;
+    orientation: DOMPointReadOnly;
+    matrix: Float32Array;
+    inverse: XRRigidTransform;
+}
+
+interface XRView {
+    eye: XREye;
+    projectionMatrix: Float32Array;
+    transform: XRRigidTransform;
+}
+
+interface XRInputSourceChangeEvent {
+    session: XRSession;
+    removed: Array<XRInputSource>;
+    added: Array<XRInputSource>;
+}
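
A sketch of how the updated WebXR typings above fit together: request an immersive session, attach an XRWebGLLayer through updateRenderState, request a reference space, then read the viewer pose each frame. navigator.xr and the WebGL context setup are assumptions; only the XRSession/XRReferenceSpace/XRFrame shapes come from this file.

    // Hedged sketch of the updated WebXR flow described by these typings.
    async function startXR(gl: WebGLRenderingContext): Promise<void> {
        const session: XRSession = await (navigator as any).xr.requestSession("immersive-vr"); // navigator.xr is an assumption
        await session.updateRenderState({ baseLayer: new XRWebGLLayer(session, gl) });
        const refSpace = await session.requestReferenceSpace("local");
        const onFrame = (_time: number, frame: XRFrame) => {
            const pose = frame.getViewerPose(refSpace);
            if (pose) {
                for (const view of pose.views) {
                    // view.projectionMatrix and view.transform.matrix drive per-eye rendering
                }
            }
            session.requestAnimationFrame(onFrame);
        };
        session.requestAnimationFrame(onFrame);
    }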

File diff suppressed because it is too large
+ 1 - 1
dist/preview release/babylon.js


File diff suppressed because it is too large
+ 518 - 247
dist/preview release/babylon.max.js


File diff suppressed because it is too large
+ 1 - 1
dist/preview release/babylon.max.js.map


File diff suppressed because it is too large
+ 595 - 713
dist/preview release/babylon.module.d.ts


+ 158 - 4
dist/preview release/documentation.d.ts

@@ -5030,7 +5030,7 @@ declare module BABYLON {
         /**
          * Add callback functions in this array to update all the requests before they get sent to the network
          */
-        static CustomRequestModifiers: ((request: XMLHttpRequest) => void)[];
+        static CustomRequestModifiers: ((request: XMLHttpRequest, url: string) => void)[];
         private _injectCustomRequestHeaders;
         /**
          * Gets or sets a function to be called when loading progress changes
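
The url argument added above lets a request modifier vary its behaviour per target. A short sketch, assuming the declaration belongs to the WebRequest class (exposed as BABYLON.WebRequest) and that modifiers run after the underlying XMLHttpRequest has been opened; the host name and header value are placeholders.

    // Hedged sketch: add a header only for requests going to an assumed asset host.
    BABYLON.WebRequest.CustomRequestModifiers.push((request: XMLHttpRequest, url: string) => {
        if (url.indexOf("assets.example.com") !== -1) {
            request.setRequestHeader("Authorization", "Bearer <token>");
        }
    });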
@@ -18614,7 +18614,7 @@ declare module BABYLON {
          * This represents a texture in babylon. It can be easily loaded from a network, base64 or html input.
          * @see http://doc.babylonjs.com/babylon101/materials#texture
          * @param url define the url of the picture to load as a texture
-         * @param scene define the scene the texture will belong to
+         * @param sceneOrEngine define the scene or engine the texture will belong to
          * @param noMipmap define if the texture will require mip maps or not
          * @param invertY define if the texture needs to be inverted on the y axis during loading
          * @param samplingMode define the sampling mode we want for the texture while fetching from it (Texture.NEAREST_SAMPLINGMODE...)
@@ -18624,7 +18624,7 @@ declare module BABYLON {
          * @param deleteBuffer define if the buffer we are loading the texture from should be deleted after load
          * @param format define the format of the texture we are trying to load (Engine.TEXTUREFORMAT_RGBA...)
          */
-        constructor(url: Nullable<string>, scene: Nullable<Scene>, noMipmap?: boolean, invertY?: boolean, samplingMode?: number, onLoad?: Nullable<() => void>, onError?: Nullable<(message?: string, exception?: any) => void>, buffer?: Nullable<string | ArrayBuffer | HTMLImageElement | Blob>, deleteBuffer?: boolean, format?: number);
+        constructor(url: Nullable<string>, sceneOrEngine: Nullable<Scene | Engine>, noMipmap?: boolean, invertY?: boolean, samplingMode?: number, onLoad?: Nullable<() => void>, onError?: Nullable<(message?: string, exception?: any) => void>, buffer?: Nullable<string | ArrayBuffer | HTMLImageElement | Blob>, deleteBuffer?: boolean, format?: number);
         /**
          * Update the url (and optional buffer) of this texture if url was null during construction.
          * @param url the url of the texture
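
With the second parameter widened to Scene | Engine, a texture can now be created against a bare engine. A sketch (the canvas element and texture url are assumptions):

    // Hedged sketch: construct a Texture from an Engine rather than a Scene.
    const engine = new BABYLON.Engine(canvas, true);                 // canvas is assumed to exist
    const texture = new BABYLON.Texture("textures/albedo.png", engine);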
@@ -28093,6 +28093,11 @@ declare module BABYLON {
          */
         readonly webGLVersion: number;
         /**
+         * Gets a string identifying the name of the class
+         * @returns "Engine" string
+         */
+        getClassName(): string;
+        /**
          * Returns true if the stencil buffer has been enabled through the creation option of the context.
          */
         readonly isStencilEnable: boolean;
@@ -52261,6 +52266,83 @@ declare module BABYLON {
 }
 declare module BABYLON {
     /**
+     * Helper class to render one or more effects
+     */
+    export class EffectRenderer {
+        private engine;
+        private static _Vertices;
+        private static _Indices;
+        private _vertexBuffers;
+        private _indexBuffer;
+        private _ringBufferIndex;
+        private _ringScreenBuffer;
+        private _getNextFrameBuffer;
+        /**
+         * Creates an effect renderer
+         * @param engine the engine to use for rendering
+         */
+        constructor(engine: Engine);
+        /**
+         * renders one or more effects to a specified texture
+         * @param effectWrappers list of effects to render
+         * @param outputTexture texture to draw to, if null it will render to the screen
+         */
+        render(effectWrappers: Array<EffectWrapper> | EffectWrapper, outputTexture?: Nullable<Texture>): void;
+        /**
+         * Disposes of the effect renderer
+         */
+        dispose(): void;
+    }
+    /**
+     * Options to create an EffectWrapper
+     */
+    interface EffectWrapperCreationOptions {
+        /**
+         * Engine to use to create the effect
+         */
+        engine: Engine;
+        /**
+         * Fragment shader for the effect
+         */
+        fragmentShader: string;
+        /**
+         * Attributes to use in the shader
+         */
+        attributeNames: Array<string>;
+        /**
+         * Uniforms to use in the shader
+         */
+        uniformNames: Array<string>;
+        /**
+         * Texture sampler names to use in the shader
+         */
+        samplerNames: Array<string>;
+    }
+    /**
+     * Wraps an effect to be used for rendering
+     */
+    export class EffectWrapper {
+        /**
+         * Event that is fired right before the effect is drawn (should be used to update uniforms)
+         */
+        onApplyObservable: Observable<{}>;
+        /**
+         * The underlying effect
+         */
+        effect: Effect;
+        /**
+         * Creates an effect to be rendered
+         * @param creationOptions options to create the effect
+         */
+        constructor(creationOptions: EffectWrapperCreationOptions);
+        /**
+        * Disposes of the effect wrapper
+        */
+        dispose(): void;
+    }
+}
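
A usage sketch built only from the EffectRenderer/EffectWrapper API declared above; the fragment shader source, the "intensity" uniform and the vUV varying are illustrative assumptions, not values taken from this commit.

    // Hedged sketch: wrap a fragment shader and render it as a full-screen effect.
    const wrapper = new BABYLON.EffectWrapper({
        engine: engine,                                              // engine is assumed to exist
        fragmentShader: "precision highp float; varying vec2 vUV; uniform float intensity;" +
            " void main(void) { gl_FragColor = vec4(vUV * intensity, 0.0, 1.0); }",
        attributeNames: ["position"],
        uniformNames: ["intensity"],
        samplerNames: []
    });
    wrapper.onApplyObservable.add(() => {
        wrapper.effect.setFloat("intensity", 0.5);                   // update uniforms right before each draw
    });
    const renderer = new BABYLON.EffectRenderer(engine);
    renderer.render(wrapper);                                        // pass a texture as the second argument to render offscreen
    renderer.dispose();
    wrapper.dispose();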
+declare module BABYLON {
+    /**
      * Helper class to push actions to a pool of workers.
      */
     export class WorkerPool implements IDisposable {
@@ -58441,6 +58523,12 @@ declare module BABYLON {
          */
         useDefaultLoadingScreen: boolean;
         /**
+         * Gets or sets a boolean defining if the AssetsManager should automatically hide the loading screen
+         * when all assets have been downloaded.
+         * If set to false, you need to manually call hideLoadingUI() once your scene is ready.
+         */
+        autoHideLoadingUI: boolean;
+        /**
          * Creates a new AssetsManager
          * @param scene defines the scene to work on
          */
@@ -59362,19 +59450,23 @@ interface Window {
     DracoDecoderModule: any;
     setImmediate(handler: (...args: any[]) => void): number;
 }
+
 interface HTMLCanvasElement {
     requestPointerLock(): void;
     msRequestPointerLock?(): void;
     mozRequestPointerLock?(): void;
     webkitRequestPointerLock?(): void;
+
     /** Track whether a recording is in progress */
     isRecording: boolean;
     /** Capture Stream method defined by some browsers */
     captureStream(fps?: number): MediaStream;
 }
+
 interface CanvasRenderingContext2D {
     msImageSmoothingEnabled: boolean;
 }
+
 interface MouseEvent {
     mozMovementX: number;
     mozMovementY: number;
@@ -59383,34 +59475,43 @@ interface MouseEvent {
     msMovementX: number;
     msMovementY: number;
 }
+
 interface Navigator {
     mozGetVRDevices: (any: any) => any;
     webkitGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
     mozGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
     msGetUserMedia(constraints: MediaStreamConstraints, successCallback: NavigatorUserMediaSuccessCallback, errorCallback: NavigatorUserMediaErrorCallback): void;
+
     webkitGetGamepads(): Gamepad[];
     msGetGamepads(): Gamepad[];
     webkitGamepads(): Gamepad[];
 }
+
 interface HTMLVideoElement {
     mozSrcObject: any;
 }
+
 interface Math {
     fround(x: number): number;
     imul(a: number, b: number): number;
 }
+
 interface WebGLRenderingContext {
     drawArraysInstanced(mode: number, first: number, count: number, primcount: number): void;
     drawElementsInstanced(mode: number, count: number, type: number, offset: number, primcount: number): void;
     vertexAttribDivisor(index: number, divisor: number): void;
+
     createVertexArray(): any;
     bindVertexArray(vao?: WebGLVertexArrayObject | null): void;
     deleteVertexArray(vao: WebGLVertexArrayObject): void;
+
     blitFramebuffer(srcX0: number, srcY0: number, srcX1: number, srcY1: number, dstX0: number, dstY0: number, dstX1: number, dstY1: number, mask: number, filter: number): void;
     renderbufferStorageMultisample(target: number, samples: number, internalformat: number, width: number, height: number): void;
+
     bindBufferBase(target: number, index: number, buffer: WebGLBuffer | null): void;
     getUniformBlockIndex(program: WebGLProgram, uniformBlockName: string): number;
     uniformBlockBinding(program: WebGLProgram, uniformBlockIndex: number, uniformBlockBinding: number): void;
+
     // Queries
     createQuery(): WebGLQuery;
     deleteQuery(query: WebGLQuery): void;
@@ -59418,11 +59519,13 @@ interface WebGLRenderingContext {
     endQuery(target: number): void;
     getQueryParameter(query: WebGLQuery, pname: number): any;
     getQuery(target: number, pname: number): any;
+
     MAX_SAMPLES: number;
     RGBA8: number;
     READ_FRAMEBUFFER: number;
     DRAW_FRAMEBUFFER: number;
     UNIFORM_BUFFER: number;
+
     HALF_FLOAT_OES: number;
     RGBA16F: number;
     RGBA32F: number;
@@ -59436,24 +59539,30 @@ interface WebGLRenderingContext {
     RG: number;
     R8: number;
     RG8: number;
+
     UNSIGNED_INT_24_8: number;
     DEPTH24_STENCIL8: number;
+
     /* Multiple Render Targets */
     drawBuffers(buffers: number[]): void;
     readBuffer(src: number): void;
+
     readonly COLOR_ATTACHMENT0: number;                             // 0x8CE1
     readonly COLOR_ATTACHMENT1: number;                             // 0x8CE2
     readonly COLOR_ATTACHMENT2: number;                             // 0x8CE3
     readonly COLOR_ATTACHMENT3: number;                             // 0x8CE4
+
     // Occlusion Query
     ANY_SAMPLES_PASSED_CONSERVATIVE: number;
     ANY_SAMPLES_PASSED: number;
     QUERY_RESULT_AVAILABLE: number;
     QUERY_RESULT: number;
 }
+
 interface WebGLProgram {
     __SPECTOR_rebuildProgram?: ((vertexSourceCode: string, fragmentSourceCode: string, onCompiled: (program: WebGLProgram) => void, onError: (message: string) => void) => void) | null;
 }
+
 interface EXT_disjoint_timer_query {
     QUERY_COUNTER_BITS_EXT: number;
     TIME_ELAPSED_EXT: number;
@@ -59468,6 +59577,7 @@ interface EXT_disjoint_timer_query {
     getQueryObjectEXT(query: WebGLQuery, target: number): any;
     deleteQueryEXT(query: WebGLQuery): void;
 }
+
 interface WebGLUniformLocation {
     _currentState: any;
 }
@@ -59475,6 +59585,7 @@ interface WebGLUniformLocation {
 // Project: https://www.khronos.org/registry/webgl/specs/latest/2.0/
 // Definitions by: Nico Kemnitz <https://github.com/nkemnitz/>
 // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+
 interface WebGLRenderingContext {
     readonly RASTERIZER_DISCARD: number;
     readonly DEPTH_COMPONENT24: number;
@@ -59526,10 +59637,13 @@ interface WebGLRenderingContext {
     readonly UNSIGNED_INT_10F_11F_11F_REV: number;
     readonly UNSIGNED_INT_5_9_9_9_REV: number;
     readonly FLOAT_32_UNSIGNED_INT_24_8_REV: number;
+
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ArrayBufferView | null): void;
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ArrayBufferView, offset: number): void;
     texImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, format: number, type: number, pixels: ImageBitmap | ImageData | HTMLVideoElement | HTMLImageElement | HTMLCanvasElement): void;
+
     compressedTexImage3D(target: number, level: number, internalformat: number, width: number, height: number, depth: number, border: number, data: ArrayBufferView, offset?: number, length?: number): void;
+
     readonly TRANSFORM_FEEDBACK: number;
     readonly INTERLEAVED_ATTRIBS: number;
     readonly TRANSFORM_FEEDBACK_BUFFER: number;
@@ -59539,100 +59653,123 @@ interface WebGLRenderingContext {
     beginTransformFeedback(primitiveMode: number): void;
     endTransformFeedback(): void;
     transformFeedbackVaryings(program: WebGLProgram, varyings: string[], bufferMode: number): void;
+
     clearBufferfv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferiv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferuiv(buffer: number, drawbuffer: number, values: ArrayBufferView, srcOffset: number | null): void;
     clearBufferfi(buffer: number, drawbuffer: number, depth: number, stencil: number): void;
 }
+
 interface ImageBitmap {
     readonly width: number;
     readonly height: number;
     close(): void;
 }
+
 interface WebGLQuery extends WebGLObject {
 }
+
 declare var WebGLQuery: {
     prototype: WebGLQuery;
     new(): WebGLQuery;
 };
+
 interface WebGLSampler extends WebGLObject {
 }
+
 declare var WebGLSampler: {
     prototype: WebGLSampler;
     new(): WebGLSampler;
 };
+
 interface WebGLSync extends WebGLObject {
 }
+
 declare var WebGLSync: {
     prototype: WebGLSync;
     new(): WebGLSync;
 };
+
 interface WebGLTransformFeedback extends WebGLObject {
 }
+
 declare var WebGLTransformFeedback: {
     prototype: WebGLTransformFeedback;
     new(): WebGLTransformFeedback;
 };
+
 interface WebGLVertexArrayObject extends WebGLObject {
 }
+
 declare var WebGLVertexArrayObject: {
     prototype: WebGLVertexArrayObject;
     new(): WebGLVertexArrayObject;
 };
+
 // Type definitions for WebVR API
 // Project: https://w3c.github.io/webvr/
 // Definitions by: six a <https://github.com/lostfictions>
 // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+
 interface VRDisplay extends EventTarget {
     /**
      * Dictionary of capabilities describing the VRDisplay.
      */
     readonly capabilities: VRDisplayCapabilities;
+
     /**
      * z-depth defining the far plane of the eye view frustum
      * enables mapping of values in the render target depth
      * attachment to scene coordinates. Initially set to 10000.0.
      */
     depthFar: number;
+
     /**
      * z-depth defining the near plane of the eye view frustum
      * enables mapping of values in the render target depth
      * attachment to scene coordinates. Initially set to 0.01.
      */
     depthNear: number;
+
     /**
      * An identifier for this distinct VRDisplay. Used as an
      * association point in the Gamepad API.
      */
     readonly displayId: number;
+
     /**
      * A display name, a user-readable name identifying it.
      */
     readonly displayName: string;
     readonly isConnected: boolean;
     readonly isPresenting: boolean;
+
     /**
      * If this VRDisplay supports room-scale experiences, the optional
      * stage attribute contains details on the room-scale parameters.
      */
     readonly stageParameters: VRStageParameters | null;
+
     /**
      * Passing the value returned by `requestAnimationFrame` to
      * `cancelAnimationFrame` will unregister the callback.
      * @param handle Define the handle of the request to cancel
      */
     cancelAnimationFrame(handle: number): void;
+
     /**
      * Stops presenting to the VRDisplay.
      * @returns a promise to know when it stopped
      */
     exitPresent(): Promise<void>;
+
     /**
      * Return the current VREyeParameters for the given eye.
      * @param whichEye Define the eye we want the parameter for
      * @returns the eye parameters
      */
     getEyeParameters(whichEye: string): VREyeParameters;
+
     /**
      * Populates the passed VRFrameData with the information required to render
      * the current frame.
@@ -59640,11 +59777,13 @@ interface VRDisplay extends EventTarget {
      * @returns true if the frame data was successfully populated, otherwise false
      */
     getFrameData(frameData: VRFrameData): boolean;
+
     /**
      * Get the layers currently being presented.
      * @returns the list of VR layers
      */
     getLayers(): VRLayer[];
+
     /**
      * Return a VRPose containing the future predicted pose of the VRDisplay
      * when the current frame will be presented. The value returned will not
@@ -59655,12 +59794,14 @@ interface VRDisplay extends EventTarget {
      * @returns the pose object
      */
     getPose(): VRPose;
+
     /**
      * Return the current instantaneous pose of the VRDisplay, with no
      * prediction applied.
      * @returns the current instantaneous pose
      */
     getImmediatePose(): VRPose;
+
     /**
      * The callback passed to `requestAnimationFrame` will be called
      * any time a new frame should be rendered. When the VRDisplay is
@@ -59673,6 +59814,7 @@ interface VRDisplay extends EventTarget {
      * @returns the request handle
      */
     requestAnimationFrame(callback: FrameRequestCallback): number;
+
     /**
      * Begin presenting to the VRDisplay. Must be called in response to a user gesture.
      * Repeat calls while already presenting will update the VRLayers being displayed.
@@ -59680,6 +59822,7 @@ interface VRDisplay extends EventTarget {
      * @returns a promise to know when the request has been fulfilled
      */
     requestPresent(layers: VRLayer[]): Promise<void>;
+
     /**
      * Reset the pose for this display, treating its current position and
      * orientation as the "origin/zero" values. VRPose.position,
@@ -59688,6 +59831,7 @@ interface VRDisplay extends EventTarget {
      * sitting-space experiences.
      */
     resetPose(): void;
+
     /**
      * The VRLayer provided to the VRDisplay will be captured and presented
      * in the HMD. Calling this function has the same effect on the source
@@ -59697,15 +59841,18 @@ interface VRDisplay extends EventTarget {
      */
     submitFrame(pose?: VRPose): void;
 }
+
 declare var VRDisplay: {
     prototype: VRDisplay;
     new(): VRDisplay;
 };
+
 interface VRLayer {
     leftBounds?: number[] | Float32Array | null;
     rightBounds?: number[] | Float32Array | null;
     source?: HTMLCanvasElement | null;
 }
+
 interface VRDisplayCapabilities {
     readonly canPresent: boolean;
     readonly hasExternalDisplay: boolean;
@@ -59713,6 +59860,7 @@ interface VRDisplayCapabilities {
     readonly hasPosition: boolean;
     readonly maxLayers: number;
 }
+
 interface VREyeParameters {
     /** @deprecated */
     readonly fieldOfView: VRFieldOfView;
@@ -59720,12 +59868,14 @@ interface VREyeParameters {
     readonly renderHeight: number;
     readonly renderWidth: number;
 }
+
 interface VRFieldOfView {
     readonly downDegrees: number;
     readonly leftDegrees: number;
     readonly rightDegrees: number;
     readonly upDegrees: number;
 }
+
 interface VRFrameData {
     readonly leftProjectionMatrix: Float32Array;
     readonly leftViewMatrix: Float32Array;
@@ -59734,6 +59884,7 @@ interface VRFrameData {
     readonly rightViewMatrix: Float32Array;
     readonly timestamp: number;
 }
+
 interface VRPose {
     readonly angularAcceleration: Float32Array | null;
     readonly angularVelocity: Float32Array | null;
@@ -59743,15 +59894,18 @@ interface VRPose {
     readonly position: Float32Array | null;
     readonly timestamp: number;
 }
+
 interface VRStageParameters {
     sittingToStandingTransform?: Float32Array;
     sizeX?: number;
     sizeY?: number;
 }
+
 interface Navigator {
     getVRDisplays(): Promise<VRDisplay[]>;
     readonly activeVRDisplays: ReadonlyArray<VRDisplay>;
 }
+
 interface Window {
     onvrdisplayconnected: ((this: Window, ev: Event) => any) | null;
     onvrdisplaydisconnected: ((this: Window, ev: Event) => any) | null;
@@ -59760,6 +59914,7 @@ interface Window {
     addEventListener(type: "vrdisplaydisconnected", listener: (ev: Event) => any, useCapture?: boolean): void;
     addEventListener(type: "vrdisplaypresentchange", listener: (ev: Event) => any, useCapture?: boolean): void;
 }
+
 interface Gamepad {
     readonly displayId: number;
 }
@@ -59804,7 +59959,6 @@ declare var XRWebGLLayer: {
     prototype: XRWebGLLayer;
     new(session: XRSession, context?: WebGLRenderingContext): XRWebGLLayer;
 };
-
 declare module BABYLON.GUI {
     /**
      * Class used to specific a value and its associated unit

+ 1 - 1
dist/preview release/glTF2Interface/package.json

@@ -1,7 +1,7 @@
 {
     "name": "babylonjs-gltf2interface",
     "description": "A typescript declaration of babylon's gltf2 inteface.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"

+ 2 - 2
dist/preview release/gui/package.json

@@ -4,7 +4,7 @@
     },
     "name": "babylonjs-gui",
     "description": "The Babylon.js GUI library is an extension you can use to generate interactive user interface. It is build on top of the DynamicTexture.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
@@ -28,7 +28,7 @@
     ],
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs": "4.1.0-alpha.6"
+        "babylonjs": "4.1.0-alpha.7"
     },
     "engines": {
         "node": "*"

+ 6 - 6
dist/preview release/inspector/package.json

@@ -4,7 +4,7 @@
     },
     "name": "babylonjs-inspector",
     "description": "The Babylon.js inspector.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
@@ -29,11 +29,11 @@
     ],
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs": "4.1.0-alpha.6",
-        "babylonjs-gui": "4.1.0-alpha.6",
-        "babylonjs-loaders": "4.1.0-alpha.6",
-        "babylonjs-serializers": "4.1.0-alpha.6",
-        "babylonjs-gltf2interface": "4.1.0-alpha.6"
+        "babylonjs": "4.1.0-alpha.7",
+        "babylonjs-gui": "4.1.0-alpha.7",
+        "babylonjs-loaders": "4.1.0-alpha.7",
+        "babylonjs-serializers": "4.1.0-alpha.7",
+        "babylonjs-gltf2interface": "4.1.0-alpha.7"
     },
     "devDependencies": {
         "@types/react": "~16.7.3",

+ 3 - 8
dist/preview release/loaders/babylon.glTF2FileLoader.js

@@ -567,13 +567,11 @@ var KHR_materials_pbrSpecularGlossiness = /** @class */ (function () {
         babylonMaterial.microSurface = properties.glossinessFactor == undefined ? 1 : properties.glossinessFactor;
         if (properties.diffuseTexture) {
             promises.push(this._loader.loadTextureInfoAsync(context + "/diffuseTexture", properties.diffuseTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Diffuse)";
                 babylonMaterial.albedoTexture = texture;
             }));
         }
         if (properties.specularGlossinessTexture) {
             promises.push(this._loader.loadTextureInfoAsync(context + "/specularGlossinessTexture", properties.specularGlossinessTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Specular Glossiness)";
                 babylonMaterial.reflectivityTexture = texture;
             }));
             babylonMaterial.reflectivityTexture.hasAlpha = true;
@@ -646,7 +644,6 @@ var KHR_materials_unlit = /** @class */ (function () {
             }
             if (properties.baseColorTexture) {
                 promises.push(this._loader.loadTextureInfoAsync(context + "/baseColorTexture", properties.baseColorTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Base Color)";
                     babylonMaterial.albedoTexture = texture;
                 }));
             }
@@ -2749,13 +2746,11 @@ var GLTFLoader = /** @class */ (function () {
             babylonMaterial.roughness = properties.roughnessFactor == undefined ? 1 : properties.roughnessFactor;
             if (properties.baseColorTexture) {
                 promises.push(this.loadTextureInfoAsync(context + "/baseColorTexture", properties.baseColorTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Base Color)";
                     babylonMaterial.albedoTexture = texture;
                 }));
             }
             if (properties.metallicRoughnessTexture) {
                 promises.push(this.loadTextureInfoAsync(context + "/metallicRoughnessTexture", properties.metallicRoughnessTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Metallic Roughness)";
                     babylonMaterial.metallicTexture = texture;
                 }));
                 babylonMaterial.useMetallnessFromMetallicTextureBlue = true;
@@ -2866,7 +2861,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.normalTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/normalTexture", material.normalTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Normal)";
                 babylonMaterial.bumpTexture = texture;
             }));
             babylonMaterial.invertNormalMapX = !this._babylonScene.useRightHandedSystem;
@@ -2878,7 +2872,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.occlusionTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/occlusionTexture", material.occlusionTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Occlusion)";
                 babylonMaterial.ambientTexture = texture;
             }));
             babylonMaterial.useAmbientInGrayScale = true;
@@ -2888,7 +2881,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.emissiveTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/emissiveTexture", material.emissiveTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Emissive)";
                 babylonMaterial.emissiveTexture = texture;
             }));
         }
@@ -2950,6 +2942,9 @@ var GLTFLoader = /** @class */ (function () {
         var texture = ArrayItem.Get(context + "/index", this._gltf.textures, textureInfo.index);
         var promise = this._loadTextureAsync("/textures/" + textureInfo.index, texture, function (babylonTexture) {
             babylonTexture.coordinatesIndex = textureInfo.texCoord || 0;
+            if (texture.name) {
+                babylonTexture.name = texture.name;
+            }
             GLTFLoader.AddPointerMetadata(babylonTexture, context);
             _this._parent.onTextureLoadedObservable.notifyObservers(babylonTexture);
             assign(babylonTexture);

File diff suppressed because it is too large
+ 1 - 1
dist/preview release/loaders/babylon.glTF2FileLoader.js.map


File diff suppressed because it is too large
+ 1 - 1
dist/preview release/loaders/babylon.glTF2FileLoader.min.js


+ 3 - 8
dist/preview release/loaders/babylon.glTFFileLoader.js

@@ -3116,13 +3116,11 @@ var KHR_materials_pbrSpecularGlossiness = /** @class */ (function () {
         babylonMaterial.microSurface = properties.glossinessFactor == undefined ? 1 : properties.glossinessFactor;
         if (properties.diffuseTexture) {
             promises.push(this._loader.loadTextureInfoAsync(context + "/diffuseTexture", properties.diffuseTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Diffuse)";
                 babylonMaterial.albedoTexture = texture;
             }));
         }
         if (properties.specularGlossinessTexture) {
             promises.push(this._loader.loadTextureInfoAsync(context + "/specularGlossinessTexture", properties.specularGlossinessTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Specular Glossiness)";
                 babylonMaterial.reflectivityTexture = texture;
             }));
             babylonMaterial.reflectivityTexture.hasAlpha = true;
@@ -3195,7 +3193,6 @@ var KHR_materials_unlit = /** @class */ (function () {
             }
             if (properties.baseColorTexture) {
                 promises.push(this._loader.loadTextureInfoAsync(context + "/baseColorTexture", properties.baseColorTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Base Color)";
                     babylonMaterial.albedoTexture = texture;
                 }));
             }
@@ -5298,13 +5295,11 @@ var GLTFLoader = /** @class */ (function () {
             babylonMaterial.roughness = properties.roughnessFactor == undefined ? 1 : properties.roughnessFactor;
             if (properties.baseColorTexture) {
                 promises.push(this.loadTextureInfoAsync(context + "/baseColorTexture", properties.baseColorTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Base Color)";
                     babylonMaterial.albedoTexture = texture;
                 }));
             }
             if (properties.metallicRoughnessTexture) {
                 promises.push(this.loadTextureInfoAsync(context + "/metallicRoughnessTexture", properties.metallicRoughnessTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Metallic Roughness)";
                     babylonMaterial.metallicTexture = texture;
                 }));
                 babylonMaterial.useMetallnessFromMetallicTextureBlue = true;
@@ -5415,7 +5410,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.normalTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/normalTexture", material.normalTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Normal)";
                 babylonMaterial.bumpTexture = texture;
             }));
             babylonMaterial.invertNormalMapX = !this._babylonScene.useRightHandedSystem;
@@ -5427,7 +5421,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.occlusionTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/occlusionTexture", material.occlusionTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Occlusion)";
                 babylonMaterial.ambientTexture = texture;
             }));
             babylonMaterial.useAmbientInGrayScale = true;
@@ -5437,7 +5430,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.emissiveTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/emissiveTexture", material.emissiveTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Emissive)";
                 babylonMaterial.emissiveTexture = texture;
             }));
         }
@@ -5499,6 +5491,9 @@ var GLTFLoader = /** @class */ (function () {
         var texture = ArrayItem.Get(context + "/index", this._gltf.textures, textureInfo.index);
         var promise = this._loadTextureAsync("/textures/" + textureInfo.index, texture, function (babylonTexture) {
             babylonTexture.coordinatesIndex = textureInfo.texCoord || 0;
+            if (texture.name) {
+                babylonTexture.name = texture.name;
+            }
             GLTFLoader.AddPointerMetadata(babylonTexture, context);
             _this._parent.onTextureLoadedObservable.notifyObservers(babylonTexture);
             assign(babylonTexture);

File diff suppressed because it is too large
+ 1 - 1
dist/preview release/loaders/babylon.glTFFileLoader.js.map


File diff suppressed because it is too large
+ 1 - 1
dist/preview release/loaders/babylon.glTFFileLoader.min.js


+ 3 - 8
dist/preview release/loaders/babylonjs.loaders.js

@@ -4458,13 +4458,11 @@ var KHR_materials_pbrSpecularGlossiness = /** @class */ (function () {
         babylonMaterial.microSurface = properties.glossinessFactor == undefined ? 1 : properties.glossinessFactor;
         if (properties.diffuseTexture) {
             promises.push(this._loader.loadTextureInfoAsync(context + "/diffuseTexture", properties.diffuseTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Diffuse)";
                 babylonMaterial.albedoTexture = texture;
             }));
         }
         if (properties.specularGlossinessTexture) {
             promises.push(this._loader.loadTextureInfoAsync(context + "/specularGlossinessTexture", properties.specularGlossinessTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Specular Glossiness)";
                 babylonMaterial.reflectivityTexture = texture;
             }));
             babylonMaterial.reflectivityTexture.hasAlpha = true;
@@ -4537,7 +4535,6 @@ var KHR_materials_unlit = /** @class */ (function () {
             }
             if (properties.baseColorTexture) {
                 promises.push(this._loader.loadTextureInfoAsync(context + "/baseColorTexture", properties.baseColorTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Base Color)";
                     babylonMaterial.albedoTexture = texture;
                 }));
             }
@@ -6640,13 +6637,11 @@ var GLTFLoader = /** @class */ (function () {
             babylonMaterial.roughness = properties.roughnessFactor == undefined ? 1 : properties.roughnessFactor;
             if (properties.baseColorTexture) {
                 promises.push(this.loadTextureInfoAsync(context + "/baseColorTexture", properties.baseColorTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Base Color)";
                     babylonMaterial.albedoTexture = texture;
                 }));
             }
             if (properties.metallicRoughnessTexture) {
                 promises.push(this.loadTextureInfoAsync(context + "/metallicRoughnessTexture", properties.metallicRoughnessTexture, function (texture) {
-                    texture.name = babylonMaterial.name + " (Metallic Roughness)";
                     babylonMaterial.metallicTexture = texture;
                 }));
                 babylonMaterial.useMetallnessFromMetallicTextureBlue = true;
@@ -6757,7 +6752,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.normalTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/normalTexture", material.normalTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Normal)";
                 babylonMaterial.bumpTexture = texture;
             }));
             babylonMaterial.invertNormalMapX = !this._babylonScene.useRightHandedSystem;
@@ -6769,7 +6763,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.occlusionTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/occlusionTexture", material.occlusionTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Occlusion)";
                 babylonMaterial.ambientTexture = texture;
             }));
             babylonMaterial.useAmbientInGrayScale = true;
@@ -6779,7 +6772,6 @@ var GLTFLoader = /** @class */ (function () {
         }
         if (material.emissiveTexture) {
             promises.push(this.loadTextureInfoAsync(context + "/emissiveTexture", material.emissiveTexture, function (texture) {
-                texture.name = babylonMaterial.name + " (Emissive)";
                 babylonMaterial.emissiveTexture = texture;
             }));
         }
@@ -6841,6 +6833,9 @@ var GLTFLoader = /** @class */ (function () {
         var texture = ArrayItem.Get(context + "/index", this._gltf.textures, textureInfo.index);
         var promise = this._loadTextureAsync("/textures/" + textureInfo.index, texture, function (babylonTexture) {
             babylonTexture.coordinatesIndex = textureInfo.texCoord || 0;
+            if (texture.name) {
+                babylonTexture.name = texture.name;
+            }
             GLTFLoader.AddPointerMetadata(babylonTexture, context);
             _this._parent.onTextureLoadedObservable.notifyObservers(babylonTexture);
             assign(babylonTexture);

File diff suppressed because it is too large
+ 1 - 1
dist/preview release/loaders/babylonjs.loaders.js.map


File diff suppressed because it is too large
+ 2 - 2
dist/preview release/loaders/babylonjs.loaders.min.js


+ 3 - 3
dist/preview release/loaders/package.json

@@ -4,7 +4,7 @@
     },
     "name": "babylonjs-loaders",
     "description": "The Babylon.js file loaders library is an extension you can use to load different 3D file types into a Babylon scene.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
@@ -28,8 +28,8 @@
     ],
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs-gltf2interface": "4.1.0-alpha.6",
-        "babylonjs": "4.1.0-alpha.6"
+        "babylonjs-gltf2interface": "4.1.0-alpha.7",
+        "babylonjs": "4.1.0-alpha.7"
     },
     "engines": {
         "node": "*"

+ 2 - 2
dist/preview release/materialsLibrary/package.json

@@ -4,7 +4,7 @@
     },
     "name": "babylonjs-materials",
     "description": "The Babylon.js materials library is a collection of advanced materials to be used in a Babylon.js scene.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
@@ -28,7 +28,7 @@
     ],
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs": "4.1.0-alpha.6"
+        "babylonjs": "4.1.0-alpha.7"
     },
     "engines": {
         "node": "*"

+ 2 - 2
dist/preview release/nodeEditor/package.json

@@ -4,14 +4,14 @@
     },
     "name": "babylonjs-node-editor",
     "description": "The Babylon.js node material editor.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
     },
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs": "4.1.0-alpha.6"
+        "babylonjs": "4.1.0-alpha.7"
     },
     "files": [
         "babylon.nodeEditor.max.js.map",

+ 1 - 1
dist/preview release/package.json

@@ -7,7 +7,7 @@
     ],
     "name": "babylonjs",
     "description": "Babylon.js is a JavaScript 3D engine based on webgl.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"

+ 1 - 1
dist/preview release/packagesSizeBaseLine.json

@@ -1 +1 @@
-{"engineOnly":252117,"sceneOnly":510292,"minGridMaterial":639202,"minStandardMaterial":765213}
+{"engineOnly":252171,"sceneOnly":510346,"minGridMaterial":639256,"minStandardMaterial":765323}

+ 2 - 2
dist/preview release/postProcessesLibrary/package.json

@@ -4,7 +4,7 @@
     },
     "name": "babylonjs-post-process",
     "description": "The Babylon.js materials library is a collection of advanced materials to be used in a Babylon.js scene.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
@@ -28,7 +28,7 @@
     ],
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs": "4.1.0-alpha.6"
+        "babylonjs": "4.1.0-alpha.7"
     },
     "engines": {
         "node": "*"

+ 2 - 2
dist/preview release/proceduralTexturesLibrary/package.json

@@ -4,7 +4,7 @@
     },
     "name": "babylonjs-procedural-textures",
     "description": "The Babylon.js materials library is a collection of advanced materials to be used in a Babylon.js scene.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
@@ -28,7 +28,7 @@
     ],
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs": "4.1.0-alpha.6"
+        "babylonjs": "4.1.0-alpha.7"
     },
     "engines": {
         "node": "*"

+ 3 - 3
dist/preview release/serializers/package.json

@@ -4,7 +4,7 @@
     },
     "name": "babylonjs-serializers",
     "description": "The Babylon.js serializers library is an extension you can use to serialize Babylon scenes.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"
@@ -28,8 +28,8 @@
     ],
     "license": "Apache-2.0",
     "dependencies": {
-        "babylonjs": "4.1.0-alpha.6",
-        "babylonjs-gltf2interface": "4.1.0-alpha.6"
+        "babylonjs": "4.1.0-alpha.7",
+        "babylonjs-gltf2interface": "4.1.0-alpha.7"
     },
     "engines": {
         "node": "*"

File diff suppressed because it is too large
+ 595 - 713
dist/preview release/viewer/babylon.module.d.ts


File diff suppressed because it is too large
+ 31 - 27
dist/preview release/viewer/babylon.viewer.js


File diff suppressed because it is too large
+ 2 - 2
dist/preview release/viewer/babylon.viewer.max.js


+ 7 - 1
dist/preview release/what's new.md

@@ -21,7 +21,10 @@
 - Method to check if device orientation is available ([TrevorDev](https://github.com/TrevorDev))
 - Added support for sound sprites [Doc](https://doc.babylonjs.com/how_to/playing_sounds_and_music#playing-a-sound-sprite) ([Deltakosh](https://github.com/deltakosh/))
 - Display Oculus Quest controller when using a Quest in WebVR ([TrevorDev](https://github.com/TrevorDev))
-- Added startAndReleaseDragOnPointerEvents property to pointerDragBehavior which can be set to false when using custom drag triggering ([TrevorDev](https://github.com/TrevorDev))
+- Added startAndReleaseDragOnPointerEvents property to pointerDragBehavior which can be set to false for custom drag triggering ([TrevorDev](https://github.com/TrevorDev))
+- Effect renderer to render one or multiple shader effects to a texture ([TrevorDev](https://github.com/TrevorDev))
+- Added url parameters to web request modifiers ([PierreLeBlond](https://github.com/PierreLeBlond))
+- WebXR updated to spec as of June 27th ([TrevorDev](https://github.com/TrevorDev))
 
 ### Engine
 - Morph targets now can morph UV channel as well ([Deltakosh](https://github.com/deltakosh/))
@@ -72,6 +75,7 @@
 - Added a note on shallow bounding of getBoundingInfo ([tibotiber](https://github.com/tibotiber))
 
 ## Bug fixes
+- Fixed Xbox One gamepad controller button schemes ([MackeyK24](https://github.com/MackeyK24/))
 - Added support for `AnimationGroup` serialization ([Drigax](https://github.com/drigax/))
 - Removing assetContainer from scene will also remove gui layers ([TrevorDev](https://github.com/TrevorDev))
 - A scene's input manager not adding key listeners when the canvas is already focused ([Poolminer](https://github.com/Poolminer))
@@ -84,6 +88,8 @@
 - Fix bug when adding and removing observers in quick succession ([sable](https://github.com/thscott))
 - Cannon and Ammo forceUpdate will no longer cause an unexpected exception ([TrevorDev](https://github.com/TrevorDev))
 - Loading the same multi-material twice and disposing one should not impact the other ([TrevorDev](https://github.com/TrevorDev))
+- GLTF loader now preserves texture names defined in the asset ([Drigax](https://github.com/Drigax))
+- Avoid exception when disposing of Ammo cloth physics ([TrevorDev](https://github.com/TrevorDev))
 
 ## Breaking changes
 - Setting mesh.scaling to a new vector will no longer automatically call forceUpdate (this should be done manually when needed) ([TrevorDev](https://github.com/TrevorDev))

+ 0 - 2
loaders/src/glTF/2.0/Extensions/KHR_materials_pbrSpecularGlossiness.ts

@@ -73,14 +73,12 @@ export class KHR_materials_pbrSpecularGlossiness implements IGLTFLoaderExtension
 
         if (properties.diffuseTexture) {
             promises.push(this._loader.loadTextureInfoAsync(`${context}/diffuseTexture`, properties.diffuseTexture, (texture) => {
-                texture.name = `${babylonMaterial.name} (Diffuse)`;
                 babylonMaterial.albedoTexture = texture;
             }));
         }
 
         if (properties.specularGlossinessTexture) {
             promises.push(this._loader.loadTextureInfoAsync(`${context}/specularGlossinessTexture`, properties.specularGlossinessTexture, (texture) => {
-                texture.name = `${babylonMaterial.name} (Specular Glossiness)`;
                 babylonMaterial.reflectivityTexture = texture;
             }));
 

+ 0 - 1
loaders/src/glTF/2.0/Extensions/KHR_materials_unlit.ts

@@ -58,7 +58,6 @@ export class KHR_materials_unlit implements IGLTFLoaderExtension {
 
             if (properties.baseColorTexture) {
                 promises.push(this._loader.loadTextureInfoAsync(`${context}/baseColorTexture`, properties.baseColorTexture, (texture) => {
-                    texture.name = `${babylonMaterial.name} (Base Color)`;
                     babylonMaterial.albedoTexture = texture;
                 }));
             }

+ 4 - 5
loaders/src/glTF/2.0/glTFLoader.ts

@@ -1555,14 +1555,12 @@ export class GLTFLoader implements IGLTFLoader {
 
             if (properties.baseColorTexture) {
                 promises.push(this.loadTextureInfoAsync(`${context}/baseColorTexture`, properties.baseColorTexture, (texture) => {
-                    texture.name = `${babylonMaterial.name} (Base Color)`;
                     babylonMaterial.albedoTexture = texture;
                 }));
             }
 
             if (properties.metallicRoughnessTexture) {
                 promises.push(this.loadTextureInfoAsync(`${context}/metallicRoughnessTexture`, properties.metallicRoughnessTexture, (texture) => {
-                    texture.name = `${babylonMaterial.name} (Metallic Roughness)`;
                     babylonMaterial.metallicTexture = texture;
                 }));
 
@@ -1699,7 +1697,6 @@ export class GLTFLoader implements IGLTFLoader {
 
         if (material.normalTexture) {
             promises.push(this.loadTextureInfoAsync(`${context}/normalTexture`, material.normalTexture, (texture) => {
-                texture.name = `${babylonMaterial.name} (Normal)`;
                 babylonMaterial.bumpTexture = texture;
             }));
 
@@ -1714,7 +1711,6 @@ export class GLTFLoader implements IGLTFLoader {
 
         if (material.occlusionTexture) {
             promises.push(this.loadTextureInfoAsync(`${context}/occlusionTexture`, material.occlusionTexture, (texture) => {
-                texture.name = `${babylonMaterial.name} (Occlusion)`;
                 babylonMaterial.ambientTexture = texture;
             }));
 
@@ -1726,7 +1722,6 @@ export class GLTFLoader implements IGLTFLoader {
 
         if (material.emissiveTexture) {
             promises.push(this.loadTextureInfoAsync(`${context}/emissiveTexture`, material.emissiveTexture, (texture) => {
-                texture.name = `${babylonMaterial.name} (Emissive)`;
                 babylonMaterial.emissiveTexture = texture;
             }));
         }
@@ -1792,6 +1787,10 @@ export class GLTFLoader implements IGLTFLoader {
         const texture = ArrayItem.Get(`${context}/index`, this._gltf.textures, textureInfo.index);
         const promise = this._loadTextureAsync(`/textures/${textureInfo.index}`, texture, (babylonTexture) => {
             babylonTexture.coordinatesIndex = textureInfo.texCoord || 0;
+            if (texture.name)
+            {
+                babylonTexture.name = texture.name;
+            }
 
             GLTFLoader.AddPointerMetadata(babylonTexture, context);
             this._parent.onTextureLoadedObservable.notifyObservers(babylonTexture);

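For context, a hedged sketch of what the loader change above means for consumers; the file and texture names below are hypothetical. A texture that carries a name in the glTF source now keeps that name on the Babylon side instead of being renamed after the material slot that consumes it.

    import { Scene, SceneLoader } from "babylonjs";

    declare const scene: Scene; // assumed to exist already

    // Assuming "model.gltf" declares a texture named "WoodAlbedo"
    SceneLoader.AppendAsync("./", "model.gltf", scene).then(() => {
        const tex = scene.textures.find((t) => t.name === "WoodAlbedo");
        // Before this change the texture would have been renamed to "<material name> (Base Color)"
        console.log(tex ? tex.name : "texture not found");
    });
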
+ 1 - 1
package.json

@@ -7,7 +7,7 @@
     ],
     "name": "babylonjs",
     "description": "Babylon.js is a JavaScript 3D engine based on webgl.",
-    "version": "4.1.0-alpha.6",
+    "version": "4.1.0-alpha.7",
     "repository": {
         "type": "git",
         "url": "https://github.com/BabylonJS/Babylon.js.git"

+ 1 - 1
src/Audio/audioSceneComponent.ts

@@ -398,7 +398,7 @@ export class AudioSceneComponent implements ISceneSerializableComponent {
         }
 
         if (listeningCamera && audioEngine.audioContext) {
-            audioEngine.audioContext.listener.setPosition(listeningCamera.position.x, listeningCamera.position.y, listeningCamera.position.z);
+            audioEngine.audioContext.listener.setPosition(listeningCamera.globalPosition.x, listeningCamera.globalPosition.y, listeningCamera.globalPosition.z);
             // for VR cameras
             if (listeningCamera.rigCameras && listeningCamera.rigCameras.length > 0) {
                 listeningCamera = listeningCamera.rigCameras[0];

+ 16 - 14
src/Cameras/XR/webXRCamera.ts

@@ -64,16 +64,16 @@ export class WebXRCamera extends FreeCamera {
      */
     public updateFromXRSessionManager(xrSessionManager: WebXRSessionManager) {
         // Ensure all frame data is available
-        if (!xrSessionManager._currentXRFrame || !xrSessionManager._currentXRFrame.getDevicePose) {
+        if (!xrSessionManager.currentFrame || !xrSessionManager.currentFrame.getViewerPose) {
             return false;
         }
-        var pose = xrSessionManager._currentXRFrame.getDevicePose(xrSessionManager._frameOfReference);
-        if (!pose || !pose.poseModelMatrix) {
+        var pose = xrSessionManager.currentFrame.getViewerPose(xrSessionManager.referenceSpace);
+        if (!pose || !pose.transform || !pose.transform.matrix) {
             return false;
         }
 
         // Update the parent cameras matrix
-        Matrix.FromFloat32ArrayToRefScaled(pose.poseModelMatrix, 0, 1, WebXRCamera._TmpMatrix);
+        Matrix.FromFloat32ArrayToRefScaled(pose.transform.matrix, 0, 1, WebXRCamera._TmpMatrix);
         if (!this._scene.useRightHandedSystem) {
             WebXRCamera._TmpMatrix.toggleModelMatrixHandInPlace();
         }
@@ -83,10 +83,10 @@ export class WebXRCamera extends FreeCamera {
         this.computeWorldMatrix();
 
         // Update camera rigs
-        this._updateNumberOfRigCameras(xrSessionManager._currentXRFrame.views.length);
-        xrSessionManager._currentXRFrame.views.forEach((view, i) => {
+        this._updateNumberOfRigCameras(pose.views.length);
+        pose.views.forEach((view: any, i: number) => {
             // Update view/projection matrix
-            Matrix.FromFloat32ArrayToRefScaled(pose.getViewMatrix(view), 0, 1, this.rigCameras[i]._computedViewMatrix);
+            Matrix.FromFloat32ArrayToRefScaled(view.transform.matrix, 0, 1, this.rigCameras[i]._computedViewMatrix);
             Matrix.FromFloat32ArrayToRefScaled(view.projectionMatrix, 0, 1, this.rigCameras[i]._projectionMatrix);
             if (!this._scene.useRightHandedSystem) {
                 this.rigCameras[i]._computedViewMatrix.toggleModelMatrixHandInPlace();
@@ -94,13 +94,15 @@ export class WebXRCamera extends FreeCamera {
             }
 
             // Update viewport
-            var viewport = xrSessionManager._xrSession.baseLayer.getViewport(view);
-            var width = xrSessionManager._xrSession.baseLayer.framebufferWidth;
-            var height = xrSessionManager._xrSession.baseLayer.framebufferHeight;
-            this.rigCameras[i].viewport.width = viewport.width / width;
-            this.rigCameras[i].viewport.height = viewport.height / height;
-            this.rigCameras[i].viewport.x = viewport.x / width;
-            this.rigCameras[i].viewport.y = viewport.y / height;
+            if (xrSessionManager.session.renderState.baseLayer) {
+                var viewport = xrSessionManager.session.renderState.baseLayer.getViewport(view);
+                var width = xrSessionManager.session.renderState.baseLayer.framebufferWidth;
+                var height = xrSessionManager.session.renderState.baseLayer.framebufferHeight;
+                this.rigCameras[i].viewport.width = viewport.width / width;
+                this.rigCameras[i].viewport.height = viewport.height / height;
+                this.rigCameras[i].viewport.x = viewport.x / width;
+                this.rigCameras[i].viewport.y = viewport.y / height;
+            }
 
             // Set cameras to render to the session's render target
             this.rigCameras[i].outputRenderTarget = xrSessionManager._sessionRenderTargetTexture;

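The camera update above follows the frame-loop shape of the current WebXR spec. As a point of reference, a minimal sketch of the underlying calls it now relies on (this is not Babylon API; the session and reference space are assumed to have been obtained elsewhere):

    declare const session: XRSession;
    declare const referenceSpace: XRReferenceSpace;

    function onXRFrame(_time: number, frame: XRFrame) {
        const pose = frame.getViewerPose(referenceSpace);
        if (pose && session.renderState.baseLayer) {
            for (const view of pose.views) {
                // view.transform.matrix feeds the rig camera's view matrix,
                // view.projectionMatrix its projection, and this viewport is
                // normalized against the base layer's framebuffer size
                const viewport = session.renderState.baseLayer.getViewport(view);
            }
        }
        session.requestAnimationFrame(onXRFrame);
    }
    session.requestAnimationFrame(onXRFrame);
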
+ 13 - 16
src/Cameras/XR/webXREnterExitUI.ts

@@ -2,6 +2,7 @@ import { Nullable } from "../../types";
 import { Observable } from "../../Misc/observable";
 import { IDisposable, Scene } from "../../scene";
 import { WebXRExperienceHelper, WebXRState } from "./webXRExperienceHelper";
+import { WebXRManagedOutputCanvas } from '../XR/webXRManagedOutputCanvas';
 /**
  * Button which can be used to enter a different mode of XR
  */
@@ -9,13 +10,16 @@ export class WebXREnterExitUIButton {
     /**
      * Creates a WebXREnterExitUIButton
      * @param element button element
-     * @param initializationOptions XR initialization options for the button
+     * @param sessionMode XR initialization session mode
+     * @param referenceSpaceType the type of reference space to be used
      */
     constructor(
         /** button element */
         public element: HTMLElement,
         /** XR initialization options for the button */
-        public initializationOptions: XRSessionCreationOptions
+        public sessionMode: XRSessionMode,
+        /** Reference space type */
+        public referenceSpaceType: XRReferenceSpaceType
     ) { }
     /**
      * Overwritable function which can be used to update the button's visuals when the state changes
@@ -32,7 +36,7 @@ export class WebXREnterExitUIOptions {
     /**
      * Context to enter xr with
      */
-    outputCanvasContext?: Nullable<WebGLRenderingContext>;
+    webXRManagedOutputCanvas?: Nullable<WebXRManagedOutputCanvas>;
 
     /**
      * User provided buttons to enable/disable WebXR. The system will provide default if not set
@@ -64,7 +68,7 @@ export class WebXREnterExitUI implements IDisposable {
     public static CreateAsync(scene: Scene, helper: WebXRExperienceHelper, options: WebXREnterExitUIOptions): Promise<WebXREnterExitUI> {
         var ui = new WebXREnterExitUI(scene, options);
         var supportedPromises = ui._buttons.map((btn) => {
-            return helper.supportsSessionAsync(btn.initializationOptions);
+            return helper.sessionManager.supportsSessionAsync(btn.sessionMode);
         });
         helper.onStateChangedObservable.add((state) => {
             if (state == WebXRState.NOT_IN_XR) {
@@ -82,7 +86,9 @@ export class WebXREnterExitUI implements IDisposable {
                             return;
                         } else if (helper.state == WebXRState.NOT_IN_XR) {
                             ui._updateButtons(ui._buttons[i]);
-                            await helper.enterXRAsync(ui._buttons[i].initializationOptions, "eye-level");
+                            if (options.webXRManagedOutputCanvas) {
+                                await helper.enterXRAsync(ui._buttons[i].sessionMode, ui._buttons[i].referenceSpaceType, options.webXRManagedOutputCanvas);
+                            }
                         }
                     };
                 }
@@ -101,20 +107,11 @@ export class WebXREnterExitUI implements IDisposable {
             var hmdBtn = document.createElement("button");
             hmdBtn.style.cssText = "color: #868686; border-color: #868686; border-style: solid; margin-left: 10px; height: 50px; width: 80px; background-color: rgba(51,51,51,0.7); background-repeat:no-repeat; background-position: center; outline: none;";
             hmdBtn.innerText = "HMD";
-            this._buttons.push(new WebXREnterExitUIButton(hmdBtn, { immersive: true, outputContext: options.outputCanvasContext }));
+            this._buttons.push(new WebXREnterExitUIButton(hmdBtn, "immersive-vr", "local-floor"));
             this._buttons[this._buttons.length - 1].update = function(activeButton: WebXREnterExitUIButton) {
                 this.element.style.display = (activeButton === null || activeButton === this) ? "" : "none";
                 this.element.innerText = activeButton === this ? "EXIT" : "HMD";
             };
-
-            var windowBtn = document.createElement("button");
-            windowBtn.style.cssText = hmdBtn.style.cssText;
-            windowBtn.innerText = "Window";
-            this._buttons.push(new WebXREnterExitUIButton(windowBtn, { immersive: false, environmentIntegration: true, outputContext: options.outputCanvasContext }));
-            this._buttons[this._buttons.length - 1].update = function(activeButton: WebXREnterExitUIButton) {
-                this.element.style.display = (activeButton === null || activeButton === this) ? "" : "none";
-                this.element.innerText = activeButton === this ? "EXIT" : "Window";
-            };
             this._updateButtons(null);
         }
 
@@ -145,4 +142,4 @@ export class WebXREnterExitUI implements IDisposable {
         }
         this.activeButtonChangedObservable.clear();
     }
-}
+}

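A hedged sketch of wiring the UI with the new button shape; scene, helper and outputCanvas are assumed to exist, and the option for supplying custom buttons is not shown in this diff, so the button below is constructed only for illustration.

    import { Scene, WebXRExperienceHelper, WebXRManagedOutputCanvas, WebXREnterExitUI, WebXREnterExitUIButton } from "babylonjs";

    declare const scene: Scene;
    declare const helper: WebXRExperienceHelper;
    declare const outputCanvas: WebXRManagedOutputCanvas;

    // Buttons now carry a session mode and a reference space type instead of raw creation options
    const element = document.createElement("button");
    element.innerText = "Enter VR";
    const vrButton = new WebXREnterExitUIButton(element, "immersive-vr", "local-floor");

    // The managed output canvas is handed to the UI so it can be forwarded to enterXRAsync
    WebXREnterExitUI.CreateAsync(scene, helper, { webXRManagedOutputCanvas: outputCanvas });
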
+ 27 - 37
src/Cameras/XR/webXRExperienceHelper.ts

@@ -3,10 +3,10 @@ import { Observable } from "../../Misc/observable";
 import { IDisposable, Scene } from "../../scene";
 import { Quaternion, Vector3 } from "../../Maths/math";
 import { AbstractMesh } from "../../Meshes/abstractMesh";
-import { Ray } from "../../Culling/ray";
 import { Camera } from "../../Cameras/camera";
 import { WebXRSessionManager } from "./webXRSessionManager";
 import { WebXRCamera } from "./webXRCamera";
+import { WebXRManagedOutputCanvas } from './webXRManagedOutputCanvas';
 /**
  * States of the webXR experience
  */
@@ -59,8 +59,8 @@ export class WebXRExperienceHelper implements IDisposable {
      */
     public onStateChangedObservable = new Observable<WebXRState>();
 
-    /** @hidden */
-    public _sessionManager: WebXRSessionManager;
+    /** Session manager used to keep track of xr session */
+    public sessionManager: WebXRSessionManager;
 
     private _nonVRCamera: Nullable<Camera> = null;
     private _originalSceneAutoClear = true;
@@ -74,7 +74,7 @@ export class WebXRExperienceHelper implements IDisposable {
      */
     public static CreateAsync(scene: Scene): Promise<WebXRExperienceHelper> {
         var helper = new WebXRExperienceHelper(scene);
-        return helper._sessionManager.initializeAsync().then(() => {
+        return helper.sessionManager.initializeAsync().then(() => {
             helper._supported = true;
             return helper;
         }).catch(() => {
@@ -88,7 +88,7 @@ export class WebXRExperienceHelper implements IDisposable {
      */
     private constructor(private scene: Scene) {
         this.camera = new WebXRCamera("", scene);
-        this._sessionManager = new WebXRSessionManager(scene);
+        this.sessionManager = new WebXRSessionManager(scene);
         this.container = new AbstractMesh("", scene);
         this.camera.parent = this.container;
     }
@@ -99,22 +99,30 @@ export class WebXRExperienceHelper implements IDisposable {
      */
     public exitXRAsync() {
         this._setState(WebXRState.EXITING_XR);
-        return this._sessionManager.exitXRAsync();
+        return this.sessionManager.exitXRAsync();
     }
 
     /**
      * Enters XR mode (This must be done within a user interaction in most browsers eg. button click)
      * @param sessionCreationOptions options for the XR session
-     * @param frameOfReference frame of reference of the XR session
+     * @param referenceSpaceType frame of reference of the XR session
+     * @param outputCanvas the output canvas that will be used to enter XR mode
      * @returns promise that resolves after xr mode has entered
      */
-    public enterXRAsync(sessionCreationOptions: XRSessionCreationOptions, frameOfReference: string) {
+    public enterXRAsync(sessionCreationOptions: XRSessionMode, referenceSpaceType: XRReferenceSpaceType, outputCanvas: WebXRManagedOutputCanvas) {
         if (!this._supported) {
             throw "XR session not supported by this browser";
         }
         this._setState(WebXRState.ENTERING_XR);
-
-        return this._sessionManager.enterXRAsync(sessionCreationOptions, frameOfReference).then(() => {
+        return this.sessionManager.initializeSessionAsync(sessionCreationOptions).then(() => {
+            return this.sessionManager.setReferenceSpaceAsync(referenceSpaceType);
+        }).then(() => {
+            return outputCanvas.initializeXRLayerAsync(this.sessionManager.session);
+        }).then(() => {
+            return this.sessionManager.updateRenderStateAsync({baseLayer: outputCanvas.xrLayer});
+        }).then(() => {
+            return this.sessionManager.startRenderingToXRAsync();
+        }).then(() => {
             // Cache pre xr scene settings
             this._originalSceneAutoClear = this.scene.autoClear;
             this._nonVRCamera = this.scene.activeCamera;
@@ -123,11 +131,11 @@ export class WebXRExperienceHelper implements IDisposable {
             this.scene.autoClear = false;
             this.scene.activeCamera = this.camera;
 
-            this._sessionManager.onXRFrameObservable.add(() => {
-                this.camera.updateFromXRSessionManager(this._sessionManager);
+            this.sessionManager.onXRFrameObservable.add(() => {
+                this.camera.updateFromXRSessionManager(this.sessionManager);
             });
 
-            this._sessionManager.onXRSessionEnded.addOnce(() => {
+            this.sessionManager.onXRSessionEnded.addOnce(() => {
                 // Reset camera rigs output render target to ensure sessions render target is not drawn after it ends
                 this.camera.rigCameras.forEach((c) => {
                     c.outputRenderTarget = null;
@@ -136,24 +144,18 @@ export class WebXRExperienceHelper implements IDisposable {
                 // Restore scene settings
                 this.scene.autoClear = this._originalSceneAutoClear;
                 this.scene.activeCamera = this._nonVRCamera;
-                this._sessionManager.onXRFrameObservable.clear();
+                this.sessionManager.onXRFrameObservable.clear();
 
                 this._setState(WebXRState.NOT_IN_XR);
             });
             this._setState(WebXRState.IN_XR);
+        }).catch((e: any) => {
+            console.log(e);
+            console.log(e.message);
         });
     }
 
     /**
-     * Fires a ray and returns the closest hit in the xr sessions enviornment, useful to place objects in AR
-     * @param ray ray to cast into the environment
-     * @returns Promise which resolves with a collision point in the environment if it exists
-     */
-    public environmentPointHitTestAsync(ray: Ray): Promise<Nullable<Vector3>> {
-        return this._sessionManager.environmentPointHitTestAsync(ray);
-    }
-
-    /**
      * Updates the global position of the camera by moving the camera's container
      * This should be used instead of modifying the camera's position as it will be overwritten by an xrSessions's update frame
      * @param position The desired global position of the camera
@@ -177,24 +179,12 @@ export class WebXRExperienceHelper implements IDisposable {
     }
 
     /**
-     * Checks if the creation options are supported by the xr session
-     * @param options creation options
-     * @returns true if supported
-     */
-    public supportsSessionAsync(options: XRSessionCreationOptions) {
-        if (!this._supported) {
-            return Promise.resolve(false);
-        }
-        return this._sessionManager.supportsSessionAsync(options);
-    }
-
-    /**
      * Disposes of the experience helper
      */
     public dispose() {
         this.camera.dispose();
         this.container.dispose();
         this.onStateChangedObservable.clear();
-        this._sessionManager.dispose();
+        this.sessionManager.dispose();
     }
-}
+}

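Together with the managed output canvas shown further down, a minimal hedged sketch of entering and leaving XR through the new signature; this must run inside a user gesture such as a click handler.

    import { Scene, WebXRExperienceHelper, WebXRManagedOutputCanvas } from "babylonjs";

    declare const scene: Scene;

    async function enterXR() {
        const helper = await WebXRExperienceHelper.CreateAsync(scene);
        const outputCanvas = new WebXRManagedOutputCanvas(helper);
        await helper.enterXRAsync("immersive-vr", "local-floor", outputCanvas);
        // ...later, typically from another user action:
        // await helper.exitXRAsync();
    }
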
+ 114 - 44
src/Cameras/XR/webXRInput.ts

@@ -1,9 +1,9 @@
 import { Nullable } from "../../types";
-import { Observer } from "../../Misc/observable";
-import { Matrix, Quaternion } from "../../Maths/math";
+import { Observer, Observable } from "../../Misc/observable";
 import { IDisposable, Scene } from "../../scene";
 import { AbstractMesh } from "../../Meshes/abstractMesh";
 import { WebXRExperienceHelper } from "./webXRExperienceHelper";
+import { Matrix, Quaternion } from '../../Maths/math';
 /**
  * Represents an XR input
  */
@@ -17,14 +17,69 @@ export class WebXRController {
      */
     public pointer: AbstractMesh;
 
+    private _tmpMatrix = new Matrix();
+
     /**
      * Creates the controller
      * @see https://doc.babylonjs.com/how_to/webxr
      * @param scene the scene which the controller should be associated to
+     * @param inputSource the underlying input source for the controller
+     * @param parentContainer parent that the controller meshes should be children of
      */
-    constructor(scene: Scene) {
+    constructor(
+        private scene: Scene,
+        /** The underlying input source for the controller  */
+        public inputSource: XRInputSource,
+        private parentContainer: Nullable<AbstractMesh> = null)
+    {
         this.pointer = new AbstractMesh("controllerPointer", scene);
+        if (parentContainer) {
+            parentContainer.addChild(this.pointer);
+
+        }
     }
+
+    /**
+     * Updates the controller pose based on the given XRFrame
+     * @param xrFrame xr frame to update the pose with
+     * @param referenceSpace reference space to use
+     */
+    public updateFromXRFrame(xrFrame: XRFrame, referenceSpace: XRReferenceSpace) {
+        var pose = xrFrame.getPose(this.inputSource.targetRaySpace, referenceSpace);
+        if (pose) {
+            Matrix.FromFloat32ArrayToRefScaled(pose.transform.matrix, 0, 1, this._tmpMatrix);
+            if (!this.pointer.getScene().useRightHandedSystem) {
+                this._tmpMatrix.toggleModelMatrixHandInPlace();
+            }
+            if (!this.pointer.rotationQuaternion) {
+                this.pointer.rotationQuaternion = new Quaternion();
+            }
+            this._tmpMatrix.decompose(this.pointer.scaling, this.pointer.rotationQuaternion!, this.pointer.position);
+        }
+
+        if (this.inputSource.gripSpace) {
+            if (!this.grip) {
+                this.grip = new AbstractMesh("controllerGrip", this.scene);
+                if (this.parentContainer) {
+                    this.parentContainer.addChild(this.grip);
+                }
+            }
+
+            var pose = xrFrame.getPose(this.inputSource.gripSpace, referenceSpace);
+            if (pose) {
+                Matrix.FromFloat32ArrayToRefScaled(pose.transform.matrix, 0, 1, this._tmpMatrix);
+                if (!this.grip.getScene().useRightHandedSystem) {
+                    this._tmpMatrix.toggleModelMatrixHandInPlace();
+                }
+                if (!this.grip.rotationQuaternion) {
+                    this.grip.rotationQuaternion = new Quaternion();
+                }
+                this._tmpMatrix.decompose(this.grip.scaling, this.grip.rotationQuaternion!, this.grip.position);
+            }
+        }
+
+    }
+
     /**
      * Disposes of the object
      */
@@ -44,58 +99,73 @@ export class WebXRInput implements IDisposable {
      * XR controllers being tracked
      */
     public controllers: Array<WebXRController> = [];
-    private _tmpMatrix = new Matrix();
     private _frameObserver: Nullable<Observer<any>>;
+    /**
+     * Event when a controller has been connected/added
+     */
+    public onControllerAddedObservable = new Observable<WebXRController>();
+    /**
+     * Event when a controller has been removed/disconnected
+     */
+    public onControllerRemovedObservable = new Observable<WebXRController>();
 
     /**
      * Initializes the WebXRInput
      * @param helper experience helper which the input should be created for
      */
     public constructor(private helper: WebXRExperienceHelper) {
-        this._frameObserver = helper._sessionManager.onXRFrameObservable.add(() => {
-            if (!helper._sessionManager._currentXRFrame || !helper._sessionManager._currentXRFrame.getDevicePose) {
+        this._frameObserver = helper.sessionManager.onXRFrameObservable.add(() => {
+            if (!helper.sessionManager.currentFrame) {
                 return;
             }
 
-            var xrFrame = helper._sessionManager._currentXRFrame;
-            var inputSources = helper._sessionManager._xrSession.getInputSources();
-
-            inputSources.forEach((input, i) => {
-                let inputPose = xrFrame.getInputPose(input, helper._sessionManager._frameOfReference);
-                if (inputPose) {
-                    if (this.controllers.length <= i) {
-                        this.controllers.push(new WebXRController(helper.container.getScene()));
-                    }
-                    var controller = this.controllers[i];
-
-                    // Manage the grip if it exists
-                    if (inputPose.gripMatrix) {
-                        if (!controller.grip) {
-                            controller.grip = new AbstractMesh("controllerGrip", helper.container.getScene());
-                        }
-                        Matrix.FromFloat32ArrayToRefScaled(inputPose.gripMatrix, 0, 1, this._tmpMatrix);
-                        if (!controller.grip.getScene().useRightHandedSystem) {
-                            this._tmpMatrix.toggleModelMatrixHandInPlace();
-                        }
-                        if (!controller.grip.rotationQuaternion) {
-                            controller.grip.rotationQuaternion = new Quaternion();
-                        }
-                        this._tmpMatrix.decompose(controller.grip.scaling, controller.grip.rotationQuaternion, controller.grip.position);
-                    }
-
-                    // Manager pointer of controller
-                    Matrix.FromFloat32ArrayToRefScaled(inputPose.targetRay.transformMatrix, 0, 1, this._tmpMatrix);
-                    if (!controller.pointer.getScene().useRightHandedSystem) {
-                        this._tmpMatrix.toggleModelMatrixHandInPlace();
-                    }
-                    if (!controller.pointer.rotationQuaternion) {
-                        controller.pointer.rotationQuaternion = new Quaternion();
-                    }
-                    this._tmpMatrix.decompose(controller.pointer.scaling, controller.pointer.rotationQuaternion, controller.pointer.position);
-                }
+            // Start listening to input add/remove events
+            if (this.controllers.length == 0 && helper.sessionManager.session.inputSources) {
+                this._addAndRemoveControllers(helper.sessionManager.session.inputSources, []);
+                helper.sessionManager.session.addEventListener("inputsourceschange", this._onInputSourcesChange);
+            }
+
+            // Update controller pose info
+            this.controllers.forEach((controller) => {
+                controller.updateFromXRFrame(helper.sessionManager.currentFrame!, helper.sessionManager.referenceSpace);
             });
+
+        });
+    }
+
+    private _onInputSourcesChange = (event: XRInputSourceChangeEvent) => {
+        this._addAndRemoveControllers(event.added, event.removed);
+    }
+
+    private _addAndRemoveControllers(addInputs: Array<XRInputSource>, removeInputs: Array<XRInputSource>) {
+        // Add controllers if they don't already exist
+        var sources = this.controllers.map((c) => { return c.inputSource; });
+        addInputs.forEach((input) => {
+            if (sources.indexOf(input) === -1) {
+                var controller = new WebXRController(this.helper.camera._scene, input, this.helper.container);
+                this.controllers.push(controller);
+                this.onControllerAddedObservable.notifyObservers(controller);
+            }
+        });
+
+        // Remove and dispose of controllers to be disposed
+        var keepControllers: Array<WebXRController> = [];
+        var removedControllers: Array<WebXRController> = [];
+        this.controllers.forEach((c) => {
+            if (removeInputs.indexOf(c.inputSource) === -1) {
+                keepControllers.push(c);
+            } else {
+                removedControllers.push(c);
+            }
         });
+        this.controllers = keepControllers;
+        removedControllers.forEach((c) => {
+            this.onControllerRemovedObservable.notifyObservers(c);
+            c.dispose();
+        });
+
     }
+
     /**
      * Disposes of the object
      */
@@ -103,6 +173,6 @@ export class WebXRInput implements IDisposable {
         this.controllers.forEach((c) => {
             c.dispose();
         });
-        this.helper._sessionManager.onXRFrameObservable.remove(this._frameObserver);
+        this.helper.sessionManager.onXRFrameObservable.remove(this._frameObserver);
     }
-}
+}

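A hedged sketch of consuming the new controller observables; the helper is assumed to come from WebXRExperienceHelper.CreateAsync.

    import { WebXRExperienceHelper, WebXRInput } from "babylonjs";

    declare const helper: WebXRExperienceHelper;

    const input = new WebXRInput(helper);
    input.onControllerAddedObservable.add((controller) => {
        // controller.pointer (and controller.grip once a gripSpace exists) are
        // AbstractMeshes whose pose is refreshed on every XR frame
        console.log("controller added:", controller.inputSource.handedness);
    });
    input.onControllerRemovedObservable.add((controller) => {
        console.log("controller removed:", controller.inputSource.handedness);
    });
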
+ 27 - 8
src/Cameras/XR/webXRManagedOutputCanvas.ts

@@ -9,16 +9,32 @@ export class WebXRManagedOutputCanvas implements IDisposable {
     /**
      * xrpresent context of the canvas which can be used to display/mirror xr content
      */
-    public canvasContext: Nullable<WebGLRenderingContext> = null;
+    public canvasContext: WebGLRenderingContext;
+    /**
+     * xr layer for the canvas
+     */
+    public xrLayer: Nullable<XRWebGLLayer> = null;
+
+    /**
+     * Initializes the xr layer for the session
+     * @param xrSession xr session
+     * @returns a promise that will resolve once the XR Layer has been created
+     */
+    public initializeXRLayerAsync(xrSession: any) {
+        return (this.canvasContext as any).makeXRCompatible().then(() => {
+            this.xrLayer = new XRWebGLLayer(xrSession, this.canvasContext);
+        });
+    }
+
     /**
      * Initializes the canvas to be added/removed upon entering/exiting xr
      * @param helper the xr experience helper used to trigger adding/removing of the canvas
      * @param canvas The canvas to be added/removed (If not specified a full screen canvas will be created)
      */
-    public constructor(helper: WebXRExperienceHelper, canvas?: HTMLCanvasElement) {
+    constructor(private helper: WebXRExperienceHelper, canvas?: HTMLCanvasElement) {
         if (!canvas) {
             canvas = document.createElement('canvas');
-            canvas.style.cssText = "position:absolute; bottom:0px;right:0px;z-index:10;width:100%;height:100%;background-color: #000000;";
+            canvas.style.cssText = "position:absolute; bottom:0px;right:0px;z-index:10;width:90%;height:100%;background-color: #000000;";
         }
         this._setManagedOutputCanvas(canvas);
         helper.onStateChangedObservable.add((stateInfo) => {
@@ -42,22 +58,25 @@ export class WebXRManagedOutputCanvas implements IDisposable {
         this._removeCanvas();
         if (!canvas) {
             this._canvas = null;
-            this.canvasContext = null;
+            (this.canvasContext as any) = null;
         } else {
             this._canvas = canvas;
-            this.canvasContext = <any>this._canvas.getContext('xrpresent');
+            this.canvasContext = <any>this._canvas.getContext('webgl');
+            if (!this.canvasContext) {
+                this.canvasContext = <any>this._canvas.getContext('webgl2');
+            }
         }
     }
 
     private _addCanvas() {
-        if (this._canvas) {
+        if (this._canvas && this._canvas !== this.helper.container.getScene().getEngine().getRenderingCanvas()) {
             document.body.appendChild(this._canvas);
         }
     }
 
     private _removeCanvas() {
-        if (this._canvas && document.body.contains(this._canvas)) {
+        if (this._canvas && document.body.contains(this._canvas) && this._canvas !== this.helper.container.getScene().getEngine().getRenderingCanvas()) {
             document.body.removeChild(this._canvas);
         }
     }
-}
+}

+ 89 - 89
src/Cameras/XR/webXRSessionManager.ts

@@ -2,12 +2,10 @@ import { Logger } from "../../Misc/logger";
 import { Observable } from "../../Misc/observable";
 import { Nullable } from "../../types";
 import { IDisposable, Scene } from "../../scene";
-import { Vector3, Matrix } from "../../Maths/math";
 import { InternalTexture } from "../../Materials/Textures/internalTexture";
 import { RenderTargetTexture } from "../../Materials/Textures/renderTargetTexture";
-import { Ray } from "../../Culling/ray";
 /**
- * Manages an XRSession
+ * Manages an XRSession to work with Babylon's engine
  * @see https://doc.babylonjs.com/how_to/webxr
  */
 export class WebXRSessionManager implements IDisposable {
@@ -20,17 +18,25 @@ export class WebXRSessionManager implements IDisposable {
      */
     public onXRSessionEnded: Observable<any> = new Observable<any>();
 
-    /** @hidden */
-    public _xrSession: XRSession;
-    /** @hidden */
-    public _frameOfReference: XRFrameOfReference;
+    /**
+     * Underlying xr session
+     */
+    public session: XRSession;
+
+    /**
+     * Type of reference space used when creating the session
+     */
+    public referenceSpace: XRReferenceSpace;
+
     /** @hidden */
     public _sessionRenderTargetTexture: Nullable<RenderTargetTexture> = null;
-    /** @hidden */
-    public _currentXRFrame: Nullable<XRFrame>;
+
+    /**
+     * Current XR frame
+     */
+    public currentFrame: Nullable<XRFrame>;
     private _xrNavigator: any;
-    private _xrDevice: XRDevice;
-    private _tmpMatrix = new Matrix();
+    private baseLayer: Nullable<XRWebGLLayer> = null;
 
     /**
      * Constructs a WebXRSessionManager, this must be initialized within a user action before usage
@@ -52,26 +58,20 @@ export class WebXRSessionManager implements IDisposable {
         if (!this._xrNavigator.xr) {
             return Promise.reject("webXR not supported by this browser");
         }
-        // Request the webXR device
-        return this._xrNavigator.xr.requestDevice().then((device: XRDevice) => {
-            this._xrDevice = device;
-            return (<any>this.scene.getEngine()._gl).setCompatibleXRDevice(this._xrDevice);
-        });
+        return Promise.resolve();
     }
 
     /**
-     * Enters XR with the desired XR session options, this must be done with a user action (eg. button click event)
-     * @param sessionCreationOptions xr options to create the session with
-     * @param frameOfReferenceType option to configure how the xr pose is expressed
-     * @returns Promise which resolves after it enters XR
+     * Initializes an xr session
+     * @param xrSessionMode mode to initialize
+     * @returns a promise which will resolve once the session has been initialized
      */
-    public enterXRAsync(sessionCreationOptions: XRSessionCreationOptions, frameOfReferenceType: string): Promise<void> {
-        // initialize session
-        return this._xrDevice.requestSession(sessionCreationOptions).then((session: XRSession) => {
-            this._xrSession = session;
+    public initializeSessionAsync(xrSessionMode: XRSessionMode) {
+        return this._xrNavigator.xr.requestSession(xrSessionMode).then((session: XRSession) => {
+            this.session = session;
 
             // handle when the session is ended (By calling session.end or device ends its own session eg. pressing home button on phone)
-            this._xrSession.addEventListener("end", () => {
+            this.session.addEventListener("end", () => {
                 // Remove render target texture and notify frame obervers
                 this._sessionRenderTargetTexture = null;
 
@@ -83,83 +83,80 @@ export class WebXRSessionManager implements IDisposable {
                 this.onXRSessionEnded.notifyObservers(null);
                 this.scene.getEngine()._renderLoop();
             }, { once: true });
+        });
+    }
 
-            this._xrSession.baseLayer = new XRWebGLLayer(this._xrSession, this.scene.getEngine()._gl);
-            return this._xrSession.requestFrameOfReference(frameOfReferenceType);
-        }).then((frameOfRef: any) => {
-            this._frameOfReference = frameOfRef;
-            // Tell the engine's render loop to be driven by the xr session's refresh rate and provide xr pose information
-            this.scene.getEngine().customAnimationFrameRequester = {
-                requestAnimationFrame: this._xrSession.requestAnimationFrame.bind(this._xrSession),
-                renderFunction: (timestamp: number, xrFrame: Nullable<XRFrame>) => {
-                    // Store the XR frame in the manager to be consumed by the XR camera to update pose
-                    this._currentXRFrame = xrFrame;
-                    this.onXRFrameObservable.notifyObservers(null);
-                    this.scene.getEngine()._renderLoop();
-                }
-            };
-            // Create render target texture from xr's webgl render target
-            this._sessionRenderTargetTexture = WebXRSessionManager._CreateRenderTargetTextureFromSession(this._xrSession, this.scene);
-
-            // Stop window's animation frame and trigger sessions animation frame
-            window.cancelAnimationFrame(this.scene.getEngine()._frameHandler);
-            this.scene.getEngine()._renderLoop();
+    /**
+     * Sets the reference space on the xr session
+     * @param referenceSpace space to set
+     * @returns a promise that will resolve once the reference space has been set
+     */
+    public setReferenceSpaceAsync(referenceSpace: XRReferenceSpaceType) {
+        return this.session.requestReferenceSpace(referenceSpace).then((referenceSpace: XRReferenceSpace) => {
+            this.referenceSpace = referenceSpace;
         });
     }
 
     /**
-     * Stops the xrSession and restores the renderloop
-     * @returns Promise which resolves after it exits XR
+     * Updates the render state of the session
+     * @param state state to set
+     * @returns a promise that resolves once the render state has been updated
      */
-    public exitXRAsync() {
-        return this._xrSession.end();
+    public updateRenderStateAsync(state: any) {
+        if (state.baseLayer) {
+            this.baseLayer = state.baseLayer;
+        }
+        return this.session.updateRenderState(state);
     }
 
     /**
-     * Fires a ray and returns the closest hit in the xr sessions enviornment, useful to place objects in AR
-     * @param ray ray to cast into the environment
-     * @returns Promise which resolves with a collision point in the environment if it exists
+     * Starts rendering to the xr layer
+     * @returns a promise that will resolve once rendering has started
      */
-    public environmentPointHitTestAsync(ray: Ray): Promise<Nullable<Vector3>> {
-        return new Promise((res) => {
-            // Compute left handed inputs to request hit test
-            var origin = new Float32Array([ray.origin.x, ray.origin.y, ray.origin.z]);
-            var direction = new Float32Array([ray.direction.x, ray.direction.y, ray.direction.z]);
-            if (!this.scene.useRightHandedSystem) {
-                origin[2] *= -1;
-                direction[2] *= -1;
+    public startRenderingToXRAsync() {
+        // Tell the engine's render loop to be driven by the xr session's refresh rate and provide xr pose information
+        this.scene.getEngine().customAnimationFrameRequester = {
+            requestAnimationFrame: this.session.requestAnimationFrame.bind(this.session),
+            renderFunction: (timestamp: number, xrFrame: Nullable<XRFrame>) => {
+                // Store the XR frame in the manager to be consumed by the XR camera to update pose
+                this.currentFrame = xrFrame;
+                this.onXRFrameObservable.notifyObservers(null);
+                this.scene.getEngine()._renderLoop();
             }
+        };
+        // Create render target texture from xr's webgl render target
+        this._sessionRenderTargetTexture = WebXRSessionManager._CreateRenderTargetTextureFromSession(this.session, this.scene, this.baseLayer!);
+
+        // Stop window's animation frame and trigger sessions animation frame
+        window.cancelAnimationFrame(this.scene.getEngine()._frameHandler);
+        this.scene.getEngine()._renderLoop();
+        return Promise.resolve();
+    }
 
-            // Fire hittest
-            this._xrSession.requestHitTest(origin, direction, this._frameOfReference)
-                .then((hits: any) => {
-                    if (hits.length > 0) {
-                        Matrix.FromFloat32ArrayToRefScaled(hits[0].hitMatrix, 0, 1.0, this._tmpMatrix);
-                        var hitPoint = this._tmpMatrix.getTranslation();
-                        if (!this.scene.useRightHandedSystem) {
-                            hitPoint.z *= -1;
-                        }
-                        res(hitPoint);
-                    } else {
-                        res(null);
-                    }
-                }).catch(() => {
-                    res(null);
-                });
-        });
+    /**
+     * Stops the xrSession and restores the renderloop
+     * @returns Promise which resolves after it exits XR
+     */
+    public exitXRAsync() {
+        return this.session.end();
     }
 
     /**
      * Checks if a session would be supported for the creation options specified
-     * @param options creation options to check if they are supported
+     * @param sessionMode session mode to check if supported, e.g. immersive-vr
      * @returns true if supported
      */
-    public supportsSessionAsync(options: XRSessionCreationOptions) {
-        return this._xrDevice.supportsSession(options).then(() => {
-            return true;
-        }).catch(() => {
-            return false;
-        });
+    public supportsSessionAsync(sessionMode: XRSessionMode) {
+        if (!(navigator as any).xr || !(navigator as any).xr.supportsSession) {
+            return Promise.resolve(false);
+        } else {
+            return (navigator as any).xr.supportsSession(sessionMode).then(() => {
+                return Promise.resolve(true);
+            }).catch((e: any) => {
+                Logger.Warn(e);
+                return Promise.resolve(false);
+            });
+        }
     }
 
     /**
@@ -168,12 +165,15 @@ export class WebXRSessionManager implements IDisposable {
      * @param session session to create render target for
      * @param scene scene the new render target should be created for
      */
-    public static _CreateRenderTargetTextureFromSession(session: XRSession, scene: Scene) {
+    public static _CreateRenderTargetTextureFromSession(session: XRSession, scene: Scene, baseLayer: XRWebGLLayer) {
+        if (!baseLayer) {
+            throw "no layer";
+        }
         // Create internal texture
         var internalTexture = new InternalTexture(scene.getEngine(), InternalTexture.DATASOURCE_UNKNOWN, true);
-        internalTexture.width = session.baseLayer.framebufferWidth;
-        internalTexture.height = session.baseLayer.framebufferHeight;
-        internalTexture._framebuffer = session.baseLayer.framebuffer;
+        internalTexture.width = baseLayer.framebufferWidth;
+        internalTexture.height = baseLayer.framebufferHeight;
+        internalTexture._framebuffer = baseLayer.framebuffer;
 
         // Create render target texture from the internal texture
         var renderTargetTexture = new RenderTargetTexture("XR renderTargetTexture", { width: internalTexture.width, height: internalTexture.height }, scene, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, true);
@@ -189,4 +189,4 @@ export class WebXRSessionManager implements IDisposable {
         this.onXRFrameObservable.clear();
         this.onXRSessionEnded.clear();
     }
-}
+}

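For reference, a hedged sketch of driving the session manager step by step; the experience helper's enterXRAsync chains exactly these calls, so most code should not need to do this by hand.

    import { WebXRExperienceHelper, WebXRManagedOutputCanvas } from "babylonjs";

    declare const helper: WebXRExperienceHelper;
    declare const outputCanvas: WebXRManagedOutputCanvas;

    async function enterManually() {
        const sm = helper.sessionManager;
        if (!(await sm.supportsSessionAsync("immersive-vr"))) {
            return;
        }
        await sm.initializeSessionAsync("immersive-vr");
        await sm.setReferenceSpaceAsync("local-floor");
        await outputCanvas.initializeXRLayerAsync(sm.session);
        await sm.updateRenderStateAsync({ baseLayer: outputCanvas.xrLayer });
        await sm.startRenderingToXRAsync();
    }
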
+ 10 - 2
src/Engines/engine.ts

@@ -500,14 +500,14 @@ export class Engine {
      */
     // Not mixed with Version for tooling purpose.
     public static get NpmPackage(): string {
-        return "babylonjs@4.1.0-alpha.6";
+        return "babylonjs@4.1.0-alpha.7";
     }
 
     /**
      * Returns the current version of the framework
      */
     public static get Version(): string {
-        return "4.1.0-alpha.6";
+        return "4.1.0-alpha.7";
     }
 
     /**
@@ -1564,6 +1564,14 @@ export class Engine {
     }
 
     /**
+     * Gets a string identifying the name of the class
+     * @returns "Engine" string
+     */
+    public getClassName(): string {
+        return "Engine";
+    }
+
+    /**
      * Returns true if the stencil buffer has been enabled through the creation option of the context.
      */
     public get isStencilEnable(): boolean {

+ 10 - 10
src/Gamepads/xboxGamepad.ts

@@ -410,16 +410,16 @@ export class Xbox360Pad extends Gamepad {
             this.buttonY = this.browserGamepad.buttons[3].value;
             this.buttonLB = this.browserGamepad.buttons[4].value;
             this.buttonRB = this.browserGamepad.buttons[5].value;
-            this.leftTrigger = this.browserGamepad.axes[2];
-            this.rightTrigger = this.browserGamepad.axes[5];
-            this.buttonBack = this.browserGamepad.buttons[9].value;
-            this.buttonStart = this.browserGamepad.buttons[8].value;
-            this.buttonLeftStick = this.browserGamepad.buttons[6].value;
-            this.buttonRightStick = this.browserGamepad.buttons[7].value;
-            this.dPadUp = this.browserGamepad.buttons[11].value;
-            this.dPadDown = this.browserGamepad.buttons[12].value;
-            this.dPadLeft = this.browserGamepad.buttons[13].value;
-            this.dPadRight = this.browserGamepad.buttons[14].value;
+            this.leftTrigger = this.browserGamepad.buttons[6].value;
+            this.rightTrigger = this.browserGamepad.buttons[7].value;
+            this.buttonBack = this.browserGamepad.buttons[8].value;
+            this.buttonStart = this.browserGamepad.buttons[9].value;
+            this.buttonLeftStick = this.browserGamepad.buttons[10].value;
+            this.buttonRightStick = this.browserGamepad.buttons[11].value;
+            this.dPadUp = this.browserGamepad.buttons[12].value;
+            this.dPadDown = this.browserGamepad.buttons[13].value;
+            this.dPadLeft = this.browserGamepad.buttons[14].value;
+            this.dPadRight = this.browserGamepad.buttons[15].value;
         } else {
             this.buttonA = this.browserGamepad.buttons[0].value;
             this.buttonB = this.browserGamepad.buttons[1].value;

+ 1 - 1
src/Helpers/sceneHelpers.ts

@@ -213,7 +213,7 @@ Scene.prototype.createDefaultVRExperience = function(webVROptions: VRExperienceH
 Scene.prototype.createDefaultXRExperienceAsync = function(): Promise<WebXRExperienceHelper> {
     return WebXRExperienceHelper.CreateAsync(this).then((helper) => {
         var outputCanvas = new WebXRManagedOutputCanvas(helper);
-        return WebXREnterExitUI.CreateAsync(this, helper, { outputCanvasContext: outputCanvas.canvasContext })
+        return WebXREnterExitUI.CreateAsync(this, helper, { webXRManagedOutputCanvas: outputCanvas })
             .then((ui) => {
                 new WebXRInput(helper);
                 return helper;

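The one-call default setup keeps working on top of the new plumbing; a hedged sketch:

    import { Scene } from "babylonjs";

    declare const scene: Scene;

    scene.createDefaultXRExperienceAsync().then((helper) => {
        // The enter/exit UI, a managed output canvas and a WebXRInput instance are wired to this helper
        console.log("XR helper ready, state:", helper.state);
    });
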
+ 106 - 30
src/LibDeclarations/webxr.d.ts

@@ -1,41 +1,117 @@
-interface XRDevice {
-    requestSession(options: XRSessionCreationOptions): Promise<XRSession>;
-    supportsSession(options: XRSessionCreationOptions): Promise<void>;
+type XRSessionMode =
+    | "inline"
+    | "immersive-vr"
+    | "immersive-ar";
+
+type XRReferenceSpaceType =
+    | "viewer"
+    | "local"
+    | "local-floor"
+    | "bounded-floor"
+    | "unbounded";
+
+type XREnvironmentBlendMode =
+    | "opaque"
+    | "additive"
+    | "alpha-blend";
+
+type XRVisibilityState =
+    | "visible"
+    | "visible-blurred"
+    | "hidden";
+
+type XRHandedness =
+    | "none"
+    | "left"
+    | "right";
+
+type XRTargetRayMode =
+    | "gaze"
+    | "tracked-pointer"
+    | "screen";
+
+type XREye =
+    | "none"
+    | "left"
+    | "right";
+
+interface XRSpace extends EventTarget {
+
 }
-interface XRSession {
-    getInputSources(): Array<any>;
-    baseLayer: XRWebGLLayer;
-    requestFrameOfReference(type: string): Promise<void>;
-    requestHitTest(origin: Float32Array, direction: Float32Array, frameOfReference: any): any;
-    end(): Promise<void>;
-    requestAnimationFrame: Function;
-    addEventListener: Function;
+
+interface XRRenderState {
+    depthNear: number;
+    depthFar: number;
+    inlineVerticalFieldOfView: number | undefined;
+    baseLayer: XRWebGLLayer | undefined;
 }
-interface XRSessionCreationOptions {
-    outputContext?: WebGLRenderingContext | null;
-    immersive?: boolean;
-    environmentIntegration?: boolean;
+
+interface XRInputSource {
+    handedness: XRHandedness;
+    targetRayMode: XRTargetRayMode;
+    targetRaySpace: XRSpace;
+    gripSpace: XRSpace | undefined;
+    gamepad: Gamepad | undefined;
+    profiles: Array<string>;
 }
-interface XRLayer {
-    getViewport: Function;
-    framebufferWidth: number;
-    framebufferHeight: number;
+
+interface XRSession {
+    addEventListener: Function;
+    requestReferenceSpace(type: XRReferenceSpaceType): Promise<XRReferenceSpace>;
+    updateRenderState(XRRenderStateInit: any): Promise<void>;
+    requestAnimationFrame: Function;
+    end(): Promise<void>;
+    renderState: XRRenderState;
+    inputSources: Array<XRInputSource>;
+
 }
-interface XRView {
-    projectionMatrix: Float32Array;
+
+interface XRReferenceSpace extends XRSpace {
+    getOffsetReferenceSpace(originOffset: XRRigidTransform): XRReferenceSpace;
+    onreset: any;
 }
+
 interface XRFrame {
-    getDevicePose: Function;
-    getInputPose: Function;
-    views: Array<XRView>;
-    baseLayer: XRLayer;
+    session: XRSession;
+    getViewerPose(referenceSpace: XRReferenceSpace): XRViewerPose | undefined;
+    getPose(space: XRSpace, baseSpace: XRSpace): XRPose | undefined;
 }
-interface XRFrameOfReference {
+
+interface XRViewerPose extends XRPose {
+    views: Array<XRView>;
 }
-interface XRWebGLLayer extends XRLayer {
-    framebuffer: WebGLFramebuffer;
+
+interface XRPose {
+    transform: XRRigidTransform;
+    emulatedPosition: boolean;
 }
+
 declare var XRWebGLLayer: {
     prototype: XRWebGLLayer;
-    new(session: XRSession, context?: WebGLRenderingContext): XRWebGLLayer;
-};
+    new(session: XRSession, context: WebGLRenderingContext | undefined): XRWebGLLayer;
+};
+interface XRWebGLLayer {
+    framebuffer: WebGLFramebuffer;
+    framebufferWidth: number;
+    framebufferHeight: number;
+    getViewport: Function;
+}
+
+interface XRRigidTransform {
+    position: DOMPointReadOnly;
+    orientation: DOMPointReadOnly;
+    matrix: Float32Array;
+    inverse: XRRigidTransform;
+}
+
+interface XRView {
+    eye: XREye;
+    projectionMatrix: Float32Array;
+    transform: XRRigidTransform;
+}
+
+interface XRInputSourceChangeEvent {
+    session: XRSession;
+    removed: Array<XRInputSource>;
+    added: Array<XRInputSource>;
+}

+ 10 - 8
src/Materials/Textures/texture.ts

@@ -10,6 +10,7 @@ import { _AlphaState } from "../../States/index";
 import { _TypeStore } from '../../Misc/typeStore';
 import { _DevTools } from '../../Misc/devTools';
 import { IInspectable } from '../../Misc/iInspectable';
+import { Engine } from '../../Engines/engine';
 
 declare type CubeTexture = import("../../Materials/Textures/cubeTexture").CubeTexture;
 declare type MirrorTexture = import("../../Materials/Textures/mirrorTexture").MirrorTexture;
@@ -257,7 +258,7 @@ export class Texture extends BaseTexture {
      * This represents a texture in babylon. It can be easily loaded from a network, base64 or html input.
      * @see http://doc.babylonjs.com/babylon101/materials#texture
      * @param url define the url of the picture to load as a texture
-     * @param scene define the scene the texture will belong to
+     * @param scene define the scene or engine the texture will belong to
      * @param noMipmap define if the texture will require mip maps or not
      * @param invertY define if the texture needs to be inverted on the y axis during loading
      * @param samplingMode define the sampling mode we want for the texture while fectching from it (Texture.NEAREST_SAMPLINGMODE...)
@@ -267,8 +268,8 @@ export class Texture extends BaseTexture {
      * @param deleteBuffer define if the buffer we are loading the texture from should be deleted after load
      * @param format define the format of the texture we are trying to load (Engine.TEXTUREFORMAT_RGBA...)
      */
-    constructor(url: Nullable<string>, scene: Nullable<Scene>, noMipmap: boolean = false, invertY: boolean = true, samplingMode: number = Texture.TRILINEAR_SAMPLINGMODE, onLoad: Nullable<() => void> = null, onError: Nullable<(message?: string, exception?: any) => void> = null, buffer: Nullable<string | ArrayBuffer | HTMLImageElement | Blob> = null, deleteBuffer: boolean = false, format?: number) {
-        super(scene);
+    constructor(url: Nullable<string>, sceneOrEngine: Nullable<Scene | Engine>, noMipmap: boolean = false, invertY: boolean = true, samplingMode: number = Texture.TRILINEAR_SAMPLINGMODE, onLoad: Nullable<() => void> = null, onError: Nullable<(message?: string, exception?: any) => void> = null, buffer: Nullable<string | ArrayBuffer | HTMLImageElement | Blob> = null, deleteBuffer: boolean = false, format?: number) {
+        super((sceneOrEngine && sceneOrEngine.getClassName() === "Scene") ? (sceneOrEngine as Scene) : null);
 
         this.name = url || "";
         this.url = url;
@@ -281,12 +282,13 @@ export class Texture extends BaseTexture {
             this._format = format;
         }
 
-        scene = this.getScene();
+        var scene = this.getScene();
+        var engine = (sceneOrEngine && (sceneOrEngine as Engine).getCaps) ? (sceneOrEngine as Engine) : (scene ? scene.getEngine() : null);
 
-        if (!scene) {
+        if (!engine) {
             return;
         }
-        scene.getEngine().onBeforeTextureInitObservable.notifyObservers(this);
+        engine.onBeforeTextureInitObservable.notifyObservers(this);
 
         let load = () => {
             if (this._texture) {
@@ -331,8 +333,8 @@ export class Texture extends BaseTexture {
         this._texture = this._getFromCache(this.url, noMipmap, samplingMode, invertY);
 
         if (!this._texture) {
-            if (!scene.useDelayedTextureLoading) {
-                this._texture = scene.getEngine().createTexture(this.url, noMipmap, invertY, scene, samplingMode, load, onError, this._buffer, undefined, this._format);
+            if (!scene || !scene.useDelayedTextureLoading) {
+                this._texture = engine.createTexture(this.url, noMipmap, invertY, scene, samplingMode, load, onError, this._buffer, undefined, this._format);
                 if (deleteBuffer) {
                     delete this._buffer;
                 }

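A hedged sketch of the new scene-less construction path; the canvas element and texture URL are illustrative.

    import { Engine, Texture } from "babylonjs";

    declare const canvas: HTMLCanvasElement;

    const engine = new Engine(canvas, true);
    // A Scene is no longer required: the texture can be created directly against the engine
    const albedo = new Texture("textures/albedo.png", engine);
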
+ 192 - 0
src/Materials/effectRenderer.ts

@@ -0,0 +1,192 @@
+import { Nullable } from '../types';
+import { Texture } from '../Materials/Textures/texture';
+import { Engine } from '../Engines/engine';
+import { VertexBuffer } from '../Meshes/buffer';
+import { Viewport } from '../Maths/math';
+import { Constants } from '../Engines/constants';
+import { Observable } from '../Misc/observable';
+import { Effect } from './effect';
+import { DataBuffer } from '../Meshes/dataBuffer';
+
+/**
+ * Helper class to render one or more effects
+ */
+export class EffectRenderer {
+    // Fullscreen quad buffers
+    private static _Vertices = [1, 1, -1, 1, -1, -1, 1, -1];
+    private static _Indices = [0, 1, 2, 0, 2, 3];
+    private _vertexBuffers: {[key: string]: VertexBuffer};
+    private _indexBuffer: DataBuffer;
+
+    private _ringBufferIndex = 0;
+    private _ringScreenBuffer: Nullable<Array<Texture>> = null;
+
+    private _getNextFrameBuffer(incrementIndex = true) {
+        if (!this._ringScreenBuffer) {
+            this._ringScreenBuffer = [];
+            for (var i = 0; i < 2; i++) {
+                var internalTexture = this.engine.createRenderTargetTexture(
+                    {
+                        width: this.engine.getRenderWidth(true),
+                        height: this.engine.getRenderHeight(true),
+                    },
+                    {
+                        generateDepthBuffer: false,
+                        generateStencilBuffer: false,
+                        generateMipMaps: false,
+                        samplingMode: Constants.TEXTURE_NEAREST_NEAREST,
+                    },
+                );
+                var texture = new Texture("", null);
+                texture._texture = internalTexture;
+                this._ringScreenBuffer.push(texture);
+            }
+        }
+        var ret = this._ringScreenBuffer[this._ringBufferIndex];
+        if (incrementIndex) {
+            this._ringBufferIndex = (this._ringBufferIndex + 1) % 2;
+        }
+        return ret;
+    }
+
+    /**
+     * Creates an effect renderer
+     * @param engine the engine to use for rendering
+     */
+    constructor(private engine: Engine) {
+        this._vertexBuffers = {
+            [VertexBuffer.PositionKind]: new VertexBuffer(engine, EffectRenderer._Vertices, VertexBuffer.PositionKind, false, false, 2),
+        };
+        this._indexBuffer = engine.createIndexBuffer(EffectRenderer._Indices);
+    }
+
+    /**
+     * renders one or more effects to a specified texture
+     * @param effectWrappers list of effects to render
+     * @param outputTexture texture to draw to, if null it will render to the screen
+     */
+    render(effectWrappers: Array<EffectWrapper> | EffectWrapper, outputTexture: Nullable<Texture> = null) {
+        if (!Array.isArray(effectWrappers)) {
+            effectWrappers = [effectWrappers];
+        }
+
+        // Ensure all effects are ready
+        for (var wrapper of effectWrappers) {
+            if (!wrapper.effect.isReady()) {
+                return;
+            }
+        }
+
+        effectWrappers.forEach((effectWrapper, i) => {
+            var renderTo = outputTexture;
+
+            // For any subsequent effect, make its input the output of the previous effect
+            if (i !== 0) {
+                effectWrapper.effect.onBindObservable.addOnce(() => {
+                    effectWrapper.effect.setTexture("textureSampler", this._getNextFrameBuffer(false));
+                });
+            }
+
+            // Set the output to the next screenbuffer
+            if ((effectWrappers as Array<EffectWrapper>).length > 1 && i != (effectWrappers as Array<EffectWrapper>).length - 1) {
+                renderTo = this._getNextFrameBuffer();
+            } else {
+                renderTo = outputTexture;
+            }
+
+            // Reset state
+            this.engine.setViewport(new Viewport(0, 0, 1, 1));
+            this.engine.enableEffect(effectWrapper.effect);
+
+            // Bind buffers
+            if (renderTo) {
+                this.engine.bindFramebuffer(renderTo.getInternalTexture()!);
+            }
+            this.engine.bindBuffers(this._vertexBuffers, this._indexBuffer, effectWrapper.effect);
+            effectWrapper.onApplyObservable.notifyObservers({});
+
+            // Render
+            this.engine.drawElementsType(Constants.MATERIAL_TriangleFillMode, 0, 6);
+            if (renderTo) {
+                this.engine.unBindFramebuffer(renderTo.getInternalTexture()!);
+            }
+        });
+    }
+
+    /**
+     * Disposes of the effect renderer
+     */
+    dispose() {
+        if (this._ringScreenBuffer) {
+            this._ringScreenBuffer.forEach((b) => {
+                b.dispose();
+            });
+            this._ringScreenBuffer = null;
+        }
+
+        var vertexBuffer = this._vertexBuffers[VertexBuffer.PositionKind];
+        if (vertexBuffer) {
+            vertexBuffer.dispose();
+            delete this._vertexBuffers[VertexBuffer.PositionKind];
+        }
+
+        if (this._indexBuffer) {
+            this.engine._releaseBuffer(this._indexBuffer);
+        }
+    }
+}
+
+/**
+ * Options to create an EffectWrapper
+ */
+interface EffectWrapperCreationOptions {
+    /**
+     * Engine to use to create the effect
+     */
+    engine: Engine;
+    /**
+     * Fragment shader for the effect
+     */
+    fragmentShader: string;
+    /**
+     * Attributes to use in the shader
+     */
+    attributeNames: Array<string>;
+    /**
+     * Uniforms to use in the shader
+     */
+    uniformNames: Array<string>;
+    /**
+     * Texture sampler names to use in the shader
+     */
+    samplerNames: Array<string>;
+}
+
+/**
+ * Wraps an effect to be used for rendering
+ */
+export class EffectWrapper {
+    /**
+     * Event that is fired right before the effect is drawn (should be used to update uniforms)
+     */
+    public onApplyObservable = new Observable<{}>();
+    /**
+     * The underlying effect
+     */
+    public effect: Effect;
+
+    /**
+     * Creates an effect to be rendered
+     * @param creationOptions options to create the effect
+     */
+    constructor(creationOptions: EffectWrapperCreationOptions) {
+        this.effect = new Effect({fragmentSource: creationOptions.fragmentShader, vertex: "postprocess"}, creationOptions.attributeNames, creationOptions.uniformNames, creationOptions.samplerNames, creationOptions.engine);
+    }
+
+    /**
+     * Disposes of the effect wrapper
+     */
+    public dispose() {
+        this.effect.dispose();
+    }
+}
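A minimal usage sketch of the new EffectRenderer / EffectWrapper pair. The Engine instance, the input Texture and the pass-through fragment shader below are placeholders, not part of this change; the API calls follow the code in this file:

    // Placeholder fragment shader; "vUV" and "textureSampler" match the built-in postprocess vertex shader.
    const fragmentShader = `
        varying vec2 vUV;
        uniform sampler2D textureSampler;
        void main(void) {
            gl_FragColor = texture2D(textureSampler, vUV);
        }`;

    const wrapper = new EffectWrapper({
        engine: engine, // assumed existing Engine
        fragmentShader: fragmentShader,
        attributeNames: ["position"],
        uniformNames: [],
        samplerNames: ["textureSampler"],
    });

    // Update uniforms/samplers right before the draw.
    wrapper.onApplyObservable.add(() => {
        wrapper.effect.setTexture("textureSampler", inputTexture); // assumed input Texture
    });

    const effectRenderer = new EffectRenderer(engine);
    effectRenderer.render(wrapper, null); // null output renders to the screen

    wrapper.dispose();
    effectRenderer.dispose();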

+ 2 - 1
src/Materials/index.ts

@@ -14,4 +14,5 @@ export * from "./standardMaterial";
 export * from "./Textures/index";
 export * from "./uniformBuffer";
 export * from "./materialFlags";
-export * from "./Node/index";
+export * from "./Node/index";
+export * from "./effectRenderer";

+ 4 - 2
src/Meshes/Builders/cylinderBuilder.ts

@@ -8,6 +8,8 @@ VertexData.CreateCylinder = function(options: { height?: number, diameterTop?: n
     var height: number = options.height || 2;
     var diameterTop: number = (options.diameterTop === 0) ? 0 : options.diameterTop || options.diameter || 1;
     var diameterBottom: number = (options.diameterBottom === 0) ? 0 : options.diameterBottom || options.diameter || 1;
+    diameterTop = diameterTop || 0.00001; // Prevent broken normals
+    diameterBottom = diameterBottom || 0.00001; // Prevent broken normals
     var tessellation: number = options.tessellation || 24;
     var subdivisions: number = options.subdivisions || 1;
     var hasRings: boolean = options.hasRings ? true : false;
@@ -239,11 +241,11 @@ VertexData.CreateCylinder = function(options: { height?: number, diameterTop?: n
     // add caps to geometry based on cap parameter
     if ((cap === Mesh.CAP_START)
         || (cap === Mesh.CAP_ALL)) {
-        createCylinderCap(false);
+        createCylinderCap(false);
     }
     if ((cap === Mesh.CAP_END)
         || (cap === Mesh.CAP_ALL)) {
-        createCylinderCap(true);
+        createCylinderCap(true);
     }
 
     // Sides
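The 0.00001 fallbacks above keep the generated normals valid when one diameter is zero. A sketch of the case they protect, using the standard MeshBuilder API (the scene variable is assumed):

    // A cone: diameterTop of 0 previously produced degenerate normals at the apex;
    // the builder now clamps the zero diameter internally to 0.00001.
    const cone = MeshBuilder.CreateCylinder("cone", {
        height: 2,
        diameterTop: 0,
        diameterBottom: 1,
        tessellation: 24,
    }, scene);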

+ 3 - 0
src/Meshes/instancedMesh.ts

@@ -47,6 +47,9 @@ export class InstancedMesh extends AbstractMesh {
         this.setPivotMatrix(source.getPivotMatrix());
 
         this.refreshBoundingInfo();
+        if (!this._sourceMesh.subMeshes) {
+            Logger.Warn("Instances should only be created for meshes with Geometry.");
+        }
         this._syncSubMeshes();
     }
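The warning above flags instances created from geometry-less meshes. A sketch of the pattern it targets (names and scene are assumptions):

    // A bare Mesh created like this has no geometry, so its instances have no subMeshes to sync.
    const root = new Mesh("root", scene);
    const instance = root.createInstance("rootInstance"); // now logs the warning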
 

+ 10 - 1
src/Misc/assetsManager.ts

@@ -820,6 +820,13 @@ export class AssetsManager {
     public useDefaultLoadingScreen = true;
 
     /**
+     * Gets or sets a boolean defining if the AssetsManager should automatically hide the loading screen
+     * when all assets have been downloaded.
+     * If set to false, you need to manually call hideLoadingUI() once your scene is ready.
+     */
+    public autoHideLoadingUI = true;
+
+    /**
      * Creates a new AssetsManager
      * @param scene defines the scene to work on
      */
@@ -1010,7 +1017,9 @@ export class AssetsManager {
                 console.log(e);
             }
             this._isLoading = false;
-            this._scene.getEngine().hideLoadingUI();
+            if (this.autoHideLoadingUI) {
+                this._scene.getEngine().hideLoadingUI();
+            }
         }
     }
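A sketch of the new autoHideLoadingUI flag; the task and scene are placeholders:

    const assetsManager = new AssetsManager(scene);
    assetsManager.autoHideLoadingUI = false; // keep the loading screen up after downloads finish
    assetsManager.addMeshTask("load model", "", "scenes/", "model.babylon");

    assetsManager.onFinish = () => {
        // ...finish scene setup, then dismiss the loading UI manually
        scene.getEngine().hideLoadingUI();
    };

    assetsManager.load();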
 

+ 5 - 3
src/Misc/basis.ts

@@ -180,7 +180,6 @@ export class BasisTools {
      * @param transcodeResult the result of transcoding the basis file to load from
      */
     public static LoadTextureFromTranscodeResult(texture: InternalTexture, transcodeResult: TranscodeResult) {
-        texture._invertVScale = texture.invertY;
         for (var i = 0; i < transcodeResult.fileInfo.images.length; i++) {
             var rootImage = transcodeResult.fileInfo.images[i].levels[0];
             texture._invertVScale = texture.invertY;
@@ -189,6 +188,9 @@ export class BasisTools {
                 texture.type = Engine.TEXTURETYPE_UNSIGNED_SHORT_5_6_5;
                 texture.format = Engine.TEXTUREFORMAT_RGB;
 
+                // Fallback is already inverted
+                texture._invertVScale = !texture.invertY;
+
                 if (texture.getEngine().webGLVersion < 2 && (Scalar.Log2(texture.width) % 1 !== 0 || Scalar.Log2(texture.height) % 1 !== 0)) {
                     // Create non power of two texture
                     let source = new InternalTexture(texture.getEngine(), InternalTexture.DATASOURCE_TEMP);
@@ -377,7 +379,7 @@ function workerFunc(): void {
         return info;
     }
 
-    function TranscodeLevel(loadedFile: any, imageIndex: number, levelIndex: number, format: number, convertToRgb565: boolean) {
+    function TranscodeLevel(loadedFile: any, imageIndex: number, levelIndex: number, format: number, convertToRgb565: boolean): Nullable<Uint16Array> {
         var dstSize = loadedFile.getImageTranscodedSizeInBytes(imageIndex, levelIndex, format);
         var dst = new Uint8Array(dstSize);
         if (!loadedFile.transcodeImage(dst, imageIndex, levelIndex, format, 1, 0)) {
@@ -403,7 +405,7 @@ function workerFunc(): void {
      * @param  height aligned height of the image
      * @return the converted pixels
      */
-    function ConvertDxtToRgb565(src: Uint16Array, srcByteOffset: number, width: number, height: number): Uint16Array {
+    function ConvertDxtToRgb565(src: Uint8Array, srcByteOffset: number, width: number, height: number): Uint16Array {
         var c = new Uint16Array(4);
         var dst = new Uint16Array(width * height);
 

+ 2 - 2
src/Misc/webRequest.ts

@@ -13,7 +13,7 @@ export class WebRequest {
     /**
      * Add callback functions in this array to update all the requests before they get sent to the network
      */
-    public static CustomRequestModifiers = new Array<(request: XMLHttpRequest) => void>();
+    public static CustomRequestModifiers = new Array<(request: XMLHttpRequest, url: string) => void>();
 
     private _injectCustomRequestHeaders(): void {
         for (let key in WebRequest.CustomRequestHeaders) {
@@ -126,7 +126,7 @@ export class WebRequest {
      */
     public open(method: string, url: string): void {
         for (var update of WebRequest.CustomRequestModifiers) {
-            update(this._xhr);
+            update(this._xhr, url);
         }
 
         // Clean url
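With the added url argument, a modifier can now act per request. A minimal sketch (the host check is hypothetical):

    WebRequest.CustomRequestModifiers.push((request: XMLHttpRequest, url: string) => {
        // Only touch requests going to a specific asset host (hypothetical URL).
        if (url.startsWith("https://assets.example.com/")) {
            request.withCredentials = true;
        }
    });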

+ 5 - 1
src/Physics/Plugins/ammoJSPlugin.ts

@@ -462,7 +462,11 @@ export class AmmoJSPlugin implements IPhysicsEnginePlugin {
      */
     public removePhysicsBody(impostor: PhysicsImpostor) {
         if (this.world) {
-            this.world.removeRigidBody(impostor.physicsBody);
+            if (impostor.soft) {
+                this.world.removeSoftBody(impostor.physicsBody);
+            } else {
+                this.world.removeRigidBody(impostor.physicsBody);
+            }
 
             if (impostor._pluginData) {
                 impostor._pluginData.toDispose.forEach((d: any) => {

+ 3 - 0
src/Physics/physicsImpostor.ts

@@ -441,6 +441,9 @@ export class PhysicsImpostor {
             Logger.Error("No object was provided. A physics object is obligatory");
             return;
         }
+        if (this.object.parent && _options.mass !== 0) {
+            Logger.Warn("A physics impostor has been created for an object which has a parent. Babylon physics currently works in local space so unexpected issues may occur.");
+        }
 
         // Legacy support for old syntax.
         if (!this._scene && object.getScene) {
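The new warning fires for dynamic impostors on parented meshes. A sketch of the flagged pattern (mesh and parent are placeholders):

    const child = MeshBuilder.CreateBox("child", { size: 1 }, scene);
    child.parent = parentNode; // assumed existing TransformNode

    // mass !== 0 on a parented mesh triggers the warning, since the physics
    // engine currently works in local space.
    child.physicsImpostor = new PhysicsImpostor(child, PhysicsImpostor.BoxImpostor, { mass: 1 }, scene);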

+ 31 - 3
src/Rendering/depthRenderer.ts

@@ -25,6 +25,11 @@ export class DepthRenderer {
     private _scene: Scene;
     private _depthMap: RenderTargetTexture;
     private _effect: Effect;
+    private readonly _storeNonLinearDepth: boolean;
+    private readonly _clearColor: Color4;
+
+    /** Gets whether the depth renderer is using packed depth or not */
+    public readonly isPacked: boolean;
 
     private _cachedDefines: string;
     private _camera: Nullable<Camera>;
@@ -46,16 +51,29 @@ export class DepthRenderer {
      * @param scene The scene the renderer belongs to
      * @param type The texture type of the depth map (default: Engine.TEXTURETYPE_FLOAT)
      * @param camera The camera to be used to render the depth map (default: scene's active camera)
+     * @param storeNonLinearDepth Defines whether the depth is stored linearly like in Babylon shadows or directly like gl_FragCoord.z
      */
-    constructor(scene: Scene, type: number = Constants.TEXTURETYPE_FLOAT, camera: Nullable<Camera> = null) {
+    constructor(scene: Scene, type: number = Constants.TEXTURETYPE_FLOAT, camera: Nullable<Camera> = null, storeNonLinearDepth = false) {
         this._scene = scene;
+        this._storeNonLinearDepth = storeNonLinearDepth;
+        this.isPacked = type === Constants.TEXTURETYPE_UNSIGNED_BYTE;
+        if (this.isPacked) {
+            this._clearColor = new Color4(1.0, 1.0, 1.0, 1.0);
+        }
+        else {
+            this._clearColor = new Color4(1.0, 0.0, 0.0, 1.0);
+        }
+
         DepthRenderer._SceneComponentInitialization(this._scene);
 
         this._camera = camera;
         var engine = scene.getEngine();
 
         // Render target
-        this._depthMap = new RenderTargetTexture("depthMap", { width: engine.getRenderWidth(), height: engine.getRenderHeight() }, this._scene, false, true, type);
+        var format = (this.isPacked || engine.webGLVersion === 1) ? Constants.TEXTUREFORMAT_RGBA : Constants.TEXTUREFORMAT_R;
+        this._depthMap = new RenderTargetTexture("depthMap", { width: engine.getRenderWidth(), height: engine.getRenderHeight() }, this._scene, false, true, type,
+            false, undefined, undefined, undefined, undefined,
+            format);
         this._depthMap.wrapU = Texture.CLAMP_ADDRESSMODE;
         this._depthMap.wrapV = Texture.CLAMP_ADDRESSMODE;
         this._depthMap.refreshRate = 1;
@@ -69,7 +87,7 @@ export class DepthRenderer {
 
         // set default depth value to 1.0 (far away)
         this._depthMap.onClearObservable.add((engine) => {
-            engine.clear(new Color4(1.0, 1.0, 1.0, 1.0), true, true, true);
+            engine.clear(this._clearColor, true, true, true);
         });
 
         // Custom render function
@@ -214,6 +232,16 @@ export class DepthRenderer {
             MaterialHelper.PushAttributesForInstances(attribs);
         }
 
+        // Non-linear depth
+        if (this._storeNonLinearDepth) {
+            defines.push("#define NONLINEARDEPTH");
+        }
+
+        // Packed mode
+        if (this.isPacked) {
+            defines.push("#define PACKED");
+        }
+
         // Get correct effect
         var join = defines.join("\n");
         if (this._cachedDefines !== join) {

+ 5 - 4
src/Rendering/depthRendererSceneComponent.ts

@@ -15,9 +15,10 @@ declare module "../scene" {
         /**
         * Creates a depth renderer for a given camera, which contains a depth map that can be used for post processing.
          * @param camera The camera to create the depth renderer on (default: scene's active camera)
+         * @param storeNonLinearDepth Defines whether the depth is stored linearly like in Babylon shadows or directly like gl_FragCoord.z
          * @returns the created depth renderer
          */
-        enableDepthRenderer(camera?: Nullable<Camera>): DepthRenderer;
+        enableDepthRenderer(camera?: Nullable<Camera>, storeNonLinearDepth?: boolean): DepthRenderer;
 
         /**
          * Disables a depth renderer for a given camera
@@ -27,7 +28,7 @@ declare module "../scene" {
     }
 }
 
-Scene.prototype.enableDepthRenderer = function(camera?: Nullable<Camera>): DepthRenderer {
+Scene.prototype.enableDepthRenderer = function(camera?: Nullable<Camera>, storeNonLinearDepth = false): DepthRenderer {
     camera = camera || this.activeCamera;
     if (!camera) {
         throw "No camera available to enable depth renderer";
@@ -43,9 +44,9 @@ Scene.prototype.enableDepthRenderer = function(camera?: Nullable<Camera>): Depth
         else if (this.getEngine().getCaps().textureFloatRender) {
             textureType = Constants.TEXTURETYPE_FLOAT;
         } else {
-            throw "Depth renderer does not support int texture type";
+            textureType = Constants.TEXTURETYPE_UNSIGNED_BYTE;
         }
-        this._depthRenderer[camera.id] = new DepthRenderer(this, textureType, camera);
+        this._depthRenderer[camera.id] = new DepthRenderer(this, textureType, camera, storeNonLinearDepth);
     }
 
     return this._depthRenderer[camera.id];
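A sketch of the extended call; the camera choice and how the depth map is consumed are up to the caller:

    // Store gl_FragCoord.z (non-linear) rather than the linear depth metric; on engines
    // without float render targets the map now falls back to a packed UNSIGNED_BYTE texture.
    const depthRenderer = scene.enableDepthRenderer(scene.activeCamera, true);
    const depthMap = depthRenderer.getDepthMap();
    // depthMap can then be bound to a post process, e.g. effect.setTexture("depthSampler", depthMap);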

+ 16 - 0
src/Shaders/ShadersInclude/packingFunctions.fx

@@ -0,0 +1,16 @@
+vec4 pack(float depth)
+{
+    const vec4 bit_shift = vec4(255.0 * 255.0 * 255.0, 255.0 * 255.0, 255.0, 1.0);
+    const vec4 bit_mask = vec4(0.0, 1.0 / 255.0, 1.0 / 255.0, 1.0 / 255.0);
+
+    vec4 res = fract(depth * bit_shift);
+    res -= res.xxyz * bit_mask;
+
+    return res;
+}
+
+float unpack(vec4 color)
+{
+    const vec4 bit_shift = vec4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);
+    return dot(color, bit_shift);
+}
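The pack/unpack pair spreads a [0, 1) value across the four 8-bit RGBA channels so a byte render target keeps far more precision than a single channel. A TypeScript sketch of the same arithmetic, for illustration only:

    // Mirror of the GLSL pack(): fract() per channel, then subtract the carried remainder.
    function packDepth(depth: number): number[] {
        const bitShift = [255 * 255 * 255, 255 * 255, 255, 1];
        const bitMask = [0, 1 / 255, 1 / 255, 1 / 255];
        const res = bitShift.map((s) => (depth * s) % 1); // fract()
        return [
            res[0] - res[0] * bitMask[0],
            res[1] - res[0] * bitMask[1],
            res[2] - res[1] * bitMask[2],
            res[3] - res[2] * bitMask[3],
        ];
    }

    // Mirror of the GLSL unpack(): weighted sum (dot product) of the channels.
    function unpackDepth(color: number[]): number {
        const bitShift = [1 / (255 * 255 * 255), 1 / (255 * 255), 1 / 255, 1];
        return color.reduce((sum, c, i) => sum + c * bitShift[i], 0);
    }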

+ 1 - 0
src/Shaders/ShadersInclude/shadowsFragmentFunctions.fx

@@ -1,5 +1,6 @@
 #ifdef SHADOWS
     #ifndef SHADOWFLOAT
+        // Duplicate to prevent include-in-include issues
         float unpack(vec4 color)
         {
             const vec4 bit_shift = vec4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);

+ 17 - 1
src/Shaders/depth.fragment.fx

@@ -5,6 +5,10 @@ uniform sampler2D diffuseSampler;
 
 varying float vDepthMetric;
 
+#ifdef PACKED
+	#include<packingFunctions>
+#endif
+
 void main(void)
 {
 #ifdef ALPHATEST
@@ -12,5 +16,17 @@ void main(void)
 		discard;
 #endif
 
-	gl_FragColor = vec4(vDepthMetric, vDepthMetric * vDepthMetric, 0.0, 1.0);
+#ifdef NONLINEARDEPTH
+	#ifdef PACKED
+		gl_FragColor = pack(gl_FragCoord.z);
+	#else
+		gl_FragColor = vec4(gl_FragCoord.z, 0.0, 0.0, 0.0);
+	#endif
+#else
+	#ifdef PACKED
+		gl_FragColor = pack(vDepthMetric);
+	#else
+		gl_FragColor = vec4(vDepthMetric, 0.0, 0.0, 1.0);
+	#endif
+#endif
 }

+ 1 - 16
src/Shaders/kernelBlur.fragment.fx

@@ -23,22 +23,7 @@ varying vec2 sampleCenter;
 #include<kernelBlurVaryingDeclaration>[0..varyingCount]
 
 #ifdef PACKEDFLOAT
-    vec4 pack(float depth)
-    {
-        const vec4 bit_shift = vec4(255.0 * 255.0 * 255.0, 255.0 * 255.0, 255.0, 1.0);
-        const vec4 bit_mask = vec4(0.0, 1.0 / 255.0, 1.0 / 255.0, 1.0 / 255.0);
-
-        vec4 res = fract(depth * bit_shift);
-        res -= res.xxyz * bit_mask;
-
-        return res;
-    }
-
-    float unpack(vec4 color)
-    {
-        const vec4 bit_shift = vec4(1.0 / (255.0 * 255.0 * 255.0), 1.0 / (255.0 * 255.0), 1.0 / 255.0, 1.0);
-        return dot(color, bit_shift);
-    }
+	#include<packingFunctions>
 #endif
 
 void main(void)

+ 1 - 10
src/Shaders/shadowMap.fragment.fx

@@ -1,14 +1,5 @@
 #ifndef FLOAT
-vec4 pack(float depth)
-{
-    const vec4 bit_shift = vec4(255.0 * 255.0 * 255.0, 255.0 * 255.0, 255.0, 1.0);
-    const vec4 bit_mask = vec4(0.0, 1.0 / 255.0, 1.0 / 255.0, 1.0 / 255.0);
-
-    vec4 res = fract(depth * bit_shift);
-    res -= res.xxyz * bit_mask;
-
-    return res;
-}
+	#include<packingFunctions>
 #endif
 
 varying float vDepthMetric;

+ 1 - 9
src/Shaders/standard.fragment.fx

@@ -212,15 +212,7 @@ uniform vec2 dsOffsets[9];
 uniform float halfDestPixelSize;
 
 #ifdef FINAL_DOWN_SAMPLER
-vec4 pack(float value) {
-	const vec4 bit_shift = vec4(255.0 * 255.0 * 255.0, 255.0 * 255.0, 255.0, 1.0);
-	const vec4 bit_mask = vec4(0.0, 1.0 / 255.0, 1.0 / 255.0, 1.0 / 255.0);
-
-	vec4 res = fract(value * bit_shift);
-	res -= res.xxyz * bit_mask;
-
-	return res;
-}
+	#include<packingFunctions>
 #endif
 
 void main()

+ 6 - 0
src/scene.ts

@@ -3523,10 +3523,16 @@ export class Scene extends AbstractScene implements IAnimatable {
 
             this._intermediateRendering = false;
 
+            // Need to rebind if the sub-camera has an outputRenderTarget, e.g. for WebXR
+            if (this.activeCamera && this.activeCamera.outputRenderTarget) {
+                needRebind = true;
+            }
+
             // Restore framebuffer after rendering to targets
             if (needRebind) {
                 this._bindFrameBuffer();
             }
+
         }
 
         this.onAfterRenderTargetsRenderObservable.notifyObservers(this);

BIN
tests/validation/ReferenceImages/depthRenderer.png