gemercheung 3 years ago
parent
commit
719bf02500

+ 31 - 69
dist/js/video.js

@@ -1,6 +1,6 @@
 /**
 * Name: Metaverse
-* Date: 2022/4/21
+* Date: 2022/4/24
 * Author: https://www.4dkankan.com
 * Copyright © 2022 4DAGE Co., Ltd. All rights reserved.
 * Licensed under the GPL license
@@ -2174,12 +2174,16 @@
             var id = _ref3.id;
             return id === _this4.decodingId;
           });
-          var fps = 1000 / (Date.now() - this.start) * clip.data.length;
-          console.log("Decoded ".concat(clip.data.length, " frames in ").concat(Date.now() - this.start, "ms @ ").concat(fps >> 0, "FPS"));
+
+          if (clip) {
+            var fps = 1000 / (Date.now() - this.start) * clip.data.length;
+            console.log("Decoded ".concat(clip.data.length, " frames in ").concat(Date.now() - this.start, "ms @ ").concat(fps >> 0, "FPS"));
+          }
+
           this.decoding = false;
           this.decodingId = null;
           tempId = 0;
-          this.emit("decodeDone", clip.id);
+          clip && clip.id && this.emit("decodeDone", clip.id);
         }
       }
     }, {
@@ -2566,65 +2570,25 @@
   }();
 
   var canvas = null;
-  var yuvSurfaceShader = null;
-  var yTexture = null;
-  var uTexture = null;
-  var vTexture = null;
 
   function initWebGLCanvas() {
     canvas = document.createElement("canvas");
     canvas.id = "test_canvas";
     canvas.style = "position: fixed;top:0;left: 0;z-index: 100;";
     var gl = canvas.getContext("webgl");
-    yuvSurfaceShader = YUVSurfaceShader.create(gl);
-    yTexture = Texture.create(gl, gl.LUMINANCE);
-    uTexture = Texture.create(gl, gl.LUMINANCE);
-    vTexture = Texture.create(gl, gl.LUMINANCE);
+    YUVSurfaceShader.create(gl);
+    Texture.create(gl, gl.LUMINANCE);
+    Texture.create(gl, gl.LUMINANCE);
+    Texture.create(gl, gl.LUMINANCE);
     document.body.append(canvas);
   }
 
-  function draw(buffer, width, height) {
-    canvas.width = width;
-    canvas.height = height; // the width & height returned are actually padded, so we have to use the frame size to get the real image dimension
-    // when uploading to texture
-
-    var stride = width; // stride
-    // height is padded with filler rows
-    // if we knew the size of the video before encoding, we could cut out the black filler pixels. We don't, so just set
-    // it to the size after encoding
-
-    var sourceWidth = width;
-    var sourceHeight = height;
-    var maxXTexCoord = sourceWidth / stride;
-    var maxYTexCoord = sourceHeight / height;
-    var lumaSize = stride * height;
-    var chromaSize = lumaSize >> 2;
-    var yBuffer = buffer.subarray(0, lumaSize);
-    var uBuffer = buffer.subarray(lumaSize, lumaSize + chromaSize);
-    var vBuffer = buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize); //   console.log("yBuffer", 1);
-
-    window.updateTexture(yBuffer);
-    var chromaHeight = height >> 1;
-    var chromaStride = stride >> 1; // we upload the entire image, including stride padding & filler rows. The actual visible image will be mapped
-    // from texture coordinates as to crop out stride padding & filler rows using maxXTexCoord and maxYTexCoord.
-
-    yTexture.image2dBuffer(yBuffer, stride, height);
-    uTexture.image2dBuffer(uBuffer, chromaStride, chromaHeight);
-    vTexture.image2dBuffer(vBuffer, chromaStride, chromaHeight);
-    yuvSurfaceShader.setTexture(yTexture, uTexture, vTexture);
-    yuvSurfaceShader.updateShaderData({
-      w: width,
-      h: height
-    }, {
-      maxXTexCoord,
-      maxYTexCoord
-    }); // debugger
-    // data = window.changeTexture(data);
-    // window.updateTexture( data );
-
-    yuvSurfaceShader.draw();
-  }
-
+  var socket = io("ws://192.168.0.150:3000", {
+    reconnectionDelayMax: 10000
+  });
+  socket.on("connect", function (data) {
+    console.log("socket connect");
+  });
   var vDecoder = new VDecoder({
     maxChip: 100
   });
@@ -2633,8 +2597,8 @@
 
     initWebGLCanvas();
     vDecoder.fetch({
-      path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/earth",
-      range: [8, 100]
+      path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+      range: [0, 66]
     });
     vDecoder.on("fetchDone", function (clip) {
       console.log("fetchDone", clip);
@@ -2642,10 +2606,11 @@
 
     vDecoder.on("decodeData", function (data) {
       // console.log("decodeData", data);
-      var width = data.width,
-          height = data.height,
-          buffer = data.data;
-      draw(new Uint8Array(buffer), width, height);
+      data.width;
+          data.height;
+          data.data; // draw(new Uint8Array(buffer), width, height);
+      // window.updateTexture( new Uint8Array(buffer) );
+      // window.up
     });
     vDecoder.on("decodeDone", /*#__PURE__*/function () {
       var _ref = _asyncToGenerator( /*#__PURE__*/regenerator.mark(function _callee(id) {
@@ -2653,16 +2618,13 @@
           while (1) {
             switch (_context.prev = _context.next) {
               case 0:
+                // vDecoder.fetch({
+                //   path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+                //   range: [0, 66],
+                // });
+                // console.log("clipId", clipId);
 
-                _context.next = 3;
-                return vDecoder.fetch({
-                  path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/14_test",
-                  range: [0, 28]
-                });
-
-              case 3:
-
-              case 4:
+              case 1:
               case "end":
                 return _context.stop();
             }
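
The behavioural fix in dist/js/video.js is the null guard around the clip lookup: Array.prototype.find returns undefined when no entry matches decodingId, so the old code could throw on clip.data.length. A minimal standalone sketch of the pattern, with the decoder fields from the diff passed in as parameters (the finishDecode name and signature are hypothetical):

// Hypothetical sketch; tempVideos, decodingId, start and emit mirror the decoder state in the diff.
function finishDecode(tempVideos, decodingId, start, emit) {
  const clip = tempVideos.find(({ id }) => id === decodingId);
  if (clip) {
    // find() returns undefined on a miss, so the FPS math is now guarded.
    const fps = (1000 / (Date.now() - start)) * clip.data.length;
    console.log(`Decoded ${clip.data.length} frames in ${Date.now() - start}ms @ ${fps >> 0}FPS`);
  }
  // Emit only when a clip with an id exists, matching the guarded emit above.
  clip && clip.id && emit("decodeDone", clip.id);
}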

+ 1 - 1
dist/js/video.js.map
The file diff is too large


+ 1 - 0
dist/texture.html

@@ -23,6 +23,7 @@
     <script src="./libs/babylonjs.serializers.min.js"></script>
     <script src="./libs/babylon.gui.min.js"></script>
     <script src="./libs/babylon.inspector.bundle.js"></script>
+    <script src="https://cdn.socket.io/4.4.1/socket.io.min.js"></script>
         <script src="js/video.js"></script>
         <style>
             html, body {
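
The script tag added to dist/texture.html loads socket.io-client 4.4.1 from the CDN before js/video.js runs; that script installs a global io(), which is what the new socket code in the bundle calls. A hedged sketch of that usage (the ws://192.168.0.150:3000 endpoint is the LAN development server from the diff; reconnectionDelayMax caps the reconnect backoff in milliseconds):

// `io` is the global installed by the socket.io CDN script.
const socket = io("ws://192.168.0.150:3000", {
  reconnectionDelayMax: 10000,
});

socket.on("connect", () => {
  // socket.id is assigned by the server once the handshake completes.
  console.log("socket connect", socket.id);
});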

+ 38 - 16
src/h264Decoder/VDecoder.js

@@ -48,10 +48,12 @@ export class VDecoder extends EventEmitter {
       switch (message.type) {
         case "pictureReady":
           //   onPictureReady(message);
+          console.log("[VDecoder]::decodeData", Object.assign(message, { clipId: this.decodingId }));
           this.emit(
             "decodeData",
             Object.assign(message, { clipId: this.decodingId })
           );
+
           if (this.decoding && this.decodingId) {
             this.decodeNext(this.decodingId);
           }
@@ -81,9 +83,17 @@ export class VDecoder extends EventEmitter {
       this.flush();
       console.log("flush");
     }
-    const rangeFetch = range(rangeArray[0], rangeArray[1] + 1);
-    // console.log("url", url);
-    // console.log("rangeFetch", rangeFetch);
+
+    let rangeFetch = [];
+
+    if (rangeArray[0] < rangeArray[1]) {
+      rangeFetch = range(rangeArray[0], rangeArray[1] + 1);
+      console.log("[VDecoder]: forward +", rangeFetch);
+    } else {
+      rangeFetch = range(rangeArray[1], rangeArray[0] + 1).reverse();
+      console.log("[VDecoder]: reverse -", rangeFetch);
+    }
+
     const allFetch = rangeFetch.map((i) => {
       return fetch(`${url}/${i}`).then((response) => {
         return response.arrayBuffer().then(function (buffer) {
@@ -92,18 +102,27 @@ export class VDecoder extends EventEmitter {
       });
     });
 
-    return Promise.all(allFetch).then((data) => {
-      const clip = { id: uuidv4(), data };
-      this.emit("fetchDone", clip);
-      this.cacheBuffer = data.slice();
-      this.tempVideos.push(clip);
-      if (decode) {
-        this.start = Date.now();
-        this.cacheBufferTotal = clip.data.length;
-        this.decodeNext(clip.id);
-      }
-      return Promise.resolve(clip);
-    });
+    return Promise.all(allFetch)
+      .then((data) => {
+        const clip = { id: uuidv4(), data: data };
+        if (data.length > 0) {
+          this.emit("fetchDone", clip);
+          this.cacheBuffer = data.slice();
+          this.tempVideos.push(clip);
+          console.log("[VDecoder]: fetched clip", clip);
+          if (decode) {
+            this.start = Date.now();
+            this.cacheBufferTotal = clip.data.length;
+            this.decodeNext(clip.id);
+          }
+          return Promise.resolve(clip);
+        } else {
+          console.warn("[VDecoder]: fetch returned no frames", rangeFetch);
+        }
+      })
+      .catch((error) => {
+        console.log("error", error);
+      });
   }
   /**
    * @param {Uint8Array} h264Nal
@@ -130,6 +149,7 @@ export class VDecoder extends EventEmitter {
     if (nextFrame) {
       this.decode(nextFrame, tempId);
     } else {
+      console.log("tempVideos", this.tempVideos.length);
       const clip = this.tempVideos.find(({ id }) => id === this.decodingId);
       if (clip) {
         const fps = (1000 / (Date.now() - this.start)) * clip.data.length;
@@ -138,10 +158,12 @@ export class VDecoder extends EventEmitter {
             Date.now() - this.start
           }ms @ ${fps >> 0}FPS`
         );
+      } else {
+        console.warn("clip not found");
       }
 
       this.decoding = false;
-      this.decodingId = null;
+      // this.decodingId = null;
       tempId = 0;
       clip && clip.id && this.emit("decodeDone", clip.id);
     }
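
The fetch() change above makes the frame range direction-aware: an ascending rangeArray is fetched forward, while a descending one is built from the same bounds and reversed. A small sketch, assuming range(a, b) yields the half-open integer sequence [a, b) in lodash style (the frameOrder helper is hypothetical):

// Assumed lodash-like helper: range(2, 6) -> [2, 3, 4, 5]
const range = (a, b) => Array.from({ length: b - a }, (_, i) => a + i);

const frameOrder = ([from, to]) =>
  from < to
    ? range(from, to + 1)            // forward:  ascending indices
    : range(to, from + 1).reverse(); // reverse:  descending indices

frameOrder([0, 4]); // [0, 1, 2, 3, 4]
frameOrder([4, 0]); // [4, 3, 2, 1, 0]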

+ 16 - 7
src/h264Decoder/index.js

@@ -5,6 +5,13 @@ import { initWebGLCanvas, draw } from "../video/test.js";
 
 // decoder
 
+const socket = io("ws://192.168.0.150:3000", {
+  reconnectionDelayMax: 10000,
+});
+socket.on("connect", (data) => {
+  console.log("socket connect");
+});
+
 const vDecoder = new VDecoder({
   maxChip: 100,
 });
@@ -15,8 +22,8 @@ vDecoder.on("ready", () => {
   initWebGLCanvas();
 
   vDecoder.fetch({
-    path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/earth",
-    range: [8, 100],
+    path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+    range: [0, 66],
   });
 
   vDecoder.on("fetchDone", (clip) => {
@@ -26,16 +33,18 @@ vDecoder.on("ready", () => {
   vDecoder.on("decodeData", (data) => {
     // console.log("decodeData", data);
     const { width, height, data: buffer } = data;
-    draw(new Uint8Array(buffer), width, height);
+    // draw(new Uint8Array(buffer), width, height);
+    // window.updateTexture( new Uint8Array(buffer) );
+    // window.up
   });
 
   vDecoder.on("decodeDone", async (id) => {
     let clipId = null;
     // vDecoder.flush();
-    clipId = await vDecoder.fetch({
-      path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/14_test",
-      range: [0, 28],
-    });
+    // vDecoder.fetch({
+    //   path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+    //   range: [0, 66],
+    // });
     // console.log("clipId", clipId);
   });
 });

+ 1 - 1
src/video/test.js

@@ -46,7 +46,7 @@ function draw(buffer, width, height) {
     lumaSize + 2 * chromaSize
   );
 //   console.log("yBuffer", 1);
-  window.updateTexture(yBuffer);
+//   window.updateTexture(yBuffer);
 
   const chromaHeight = height >> 1;
   const chromaStride = stride >> 1;
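
For context on what was disabled: the draw() path removed from the bundle (and stubbed out in src/video/test.js) slices a planar YUV 4:2:0 frame into one full-resolution luma plane followed by two quarter-size chroma planes before uploading them as LUMINANCE textures. A minimal sketch of just that layout math, independent of the WebGL upload (splitYuv420 is a hypothetical name):

// Planar YUV 4:2:0: Y is width*height bytes; U and V are each a quarter of that.
function splitYuv420(buffer, width, height) {
  const lumaSize = width * height;
  const chromaSize = lumaSize >> 2; // (width / 2) * (height / 2)
  return {
    y: buffer.subarray(0, lumaSize),
    u: buffer.subarray(lumaSize, lumaSize + chromaSize),
    v: buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize),
  };
}

// A 4x2 frame: 8 luma bytes, then 2 U bytes, then 2 V bytes.
const planes = splitYuv420(new Uint8Array(12), 4, 2);
console.log(planes.y.length, planes.u.length, planes.v.length); // 8 2 2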