@@ -2051,6 +2051,10 @@
       switch (message.type) {
         case "pictureReady":
           // onPictureReady(message);
+          // console.log(
+          //   "[VDecoder]::decodeData",
+          //   Object.assign(message, { clipId: this.decodingId })
+          // );
           _this2.emit("decodeData", Object.assign(message, {
             clipId: _this2.decodingId
           }));
@@ -2110,8 +2114,20 @@
           console.log("flush");
         }
 
-        var rangeFetch = range$1(rangeArray[0], rangeArray[1] + 1); // console.log("url", url);
-        // console.log("rangeFetch", rangeFetch);
+        var rangeFetch = [];
+
+        if (rangeArray[0] < 0 || rangeArray[1] < 0) {
+          console.error("[VDecoder]:range: invalid", "".concat([rangeArray[0], rangeArray[1]]));
+          return;
+        }
+
+        if (rangeArray[0] < rangeArray[1]) {
+          rangeFetch = range$1(rangeArray[0], rangeArray[1] + 1);
+          console.log("[VDecoder]: forward +", rangeFetch);
+        } else {
+          rangeFetch = range$1(rangeArray[1], rangeArray[0] + 1).reverse();
+          console.log("[VDecoder]: reverse -", rangeFetch);
+        }
 
         var allFetch = rangeFetch.map(function (i) {
          return fetch("".concat(url, "/").concat(i)).then(function (response) {
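Review note: the rewritten range handling rejects negative bounds and supports
reverse playback by flipping an ascending range. A minimal sketch of the same
logic in isolation (assuming a lodash-style range(start, end), exclusive of
end, which is how range$1 is used above; the function name is hypothetical):

function buildRangeFetch(bounds, range) {
  // Reject invalid bounds instead of fetching garbage frame URLs.
  if (bounds[0] < 0 || bounds[1] < 0) return [];
  if (bounds[0] < bounds[1]) {
    // Forward playback: ascending frame indices, inclusive of bounds[1].
    return range(bounds[0], bounds[1] + 1);
  }
  // Reverse (or single-frame) playback: build the ascending run, then flip it.
  return range(bounds[1], bounds[0] + 1).reverse();
}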
@@ -2123,23 +2139,31 @@
         return Promise.all(allFetch).then(function (data) {
           var clip = {
             id: v4(),
-            data
+            data: data
           };
 
-          _this3.emit("fetchDone", clip);
+          if (data.length > 0) {
+            _this3.emit("fetchDone", clip);
 
-          _this3.cacheBuffer = data.slice();
+            _this3.cacheBuffer = data.slice();
 
-          _this3.tempVideos.push(clip);
+            _this3.tempVideos.push(clip);
 
-          if (decode) {
-            _this3.start = Date.now();
-            _this3.cacheBufferTotal = clip.data.length;
+            console.log("[VDecoder]: fetched clip", clip);
 
-            _this3.decodeNext(clip.id);
-          }
+            if (decode) {
+              _this3.start = Date.now();
+              _this3.cacheBufferTotal = clip.data.length;
 
-          return Promise.resolve(clip);
+              _this3.decodeNext(clip.id);
+            }
+
+            return Promise.resolve(clip);
+          } else {
+            console.warn("[VDecoder]: fetch returned no frames", rangeFetch);
+          }
+        }).catch(function (error) {
+          console.log("error", error);
         });
       }
       /**
@@ -2170,6 +2194,7 @@
       if (nextFrame) {
         this.decode(nextFrame, tempId);
       } else {
+        console.log("tempVideos", this.tempVideos.length);
         var clip = this.tempVideos.find(function (_ref3) {
           var id = _ref3.id;
           return id === _this4.decodingId;
@@ -2178,10 +2203,12 @@
       if (clip) {
         var fps = 1000 / (Date.now() - this.start) * clip.data.length;
         console.log("Decoded ".concat(clip.data.length, " frames in ").concat(Date.now() - this.start, "ms @ ").concat(fps >> 0, "FPS"));
+      } else {
+        console.warn("clip not found");
       }
 
-      this.decoding = false;
-      this.decodingId = null;
+      this.decoding = false; // this.decodingId = null;
+
       tempId = 0;
       clip && clip.id && this.emit("decodeDone", clip.id);
     }
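Note on the FPS line above: it is frames divided by elapsed seconds, computed
as 1000 / elapsedMs * frameCount, and "fps >> 0" truncates to an integer:

// e.g. 66 frames decoded in 550 ms:
var fps = (1000 / 550) * 66; // 120
console.log(fps >> 0);       // logs 120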
@@ -2570,19 +2597,65 @@
   }();
 
   var canvas = null;
+  var yuvSurfaceShader = null;
+  var yTexture = null;
+  var uTexture = null;
+  var vTexture = null;
 
   function initWebGLCanvas() {
     canvas = document.createElement("canvas");
     canvas.id = "test_canvas";
     canvas.style = "position: fixed;top:0;left: 0;z-index: 100;";
     var gl = canvas.getContext("webgl");
-    YUVSurfaceShader.create(gl);
-    Texture.create(gl, gl.LUMINANCE);
-    Texture.create(gl, gl.LUMINANCE);
-    Texture.create(gl, gl.LUMINANCE);
+    yuvSurfaceShader = YUVSurfaceShader.create(gl);
+    yTexture = Texture.create(gl, gl.LUMINANCE);
+    uTexture = Texture.create(gl, gl.LUMINANCE);
+    vTexture = Texture.create(gl, gl.LUMINANCE);
     document.body.append(canvas);
   }
 
+  function draw(buffer, width, height) {
+    canvas.width = width;
+    canvas.height = height; // the width & height returned are actually padded, so we have to use the frame size to get the real image dimension
+    // when uploading to texture
+
+    var stride = width; // stride
+    // height is padded with filler rows
+    // if we knew the size of the video before encoding, we could cut out the black filler pixels. We don't, so just set
+    // it to the size after encoding
+
+    var sourceWidth = width;
+    var sourceHeight = height;
+    var maxXTexCoord = sourceWidth / stride;
+    var maxYTexCoord = sourceHeight / height;
+    var lumaSize = stride * height;
+    var chromaSize = lumaSize >> 2;
+    var yBuffer = buffer.subarray(0, lumaSize);
+    var uBuffer = buffer.subarray(lumaSize, lumaSize + chromaSize);
+    var vBuffer = buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize); // console.log("yBuffer", 1);
+    // window.updateTexture(yBuffer);
+
+    var chromaHeight = height >> 1;
+    var chromaStride = stride >> 1; // we upload the entire image, including stride padding & filler rows. The actual visible image will be mapped
+    // from texture coordinates as to crop out stride padding & filler rows using maxXTexCoord and maxYTexCoord.
+
+    yTexture.image2dBuffer(yBuffer, stride, height);
+    uTexture.image2dBuffer(uBuffer, chromaStride, chromaHeight);
+    vTexture.image2dBuffer(vBuffer, chromaStride, chromaHeight);
+    yuvSurfaceShader.setTexture(yTexture, uTexture, vTexture);
+    yuvSurfaceShader.updateShaderData({
+      w: width,
+      h: height
+    }, {
+      maxXTexCoord,
+      maxYTexCoord
+    }); // debugger
+    // data = window.changeTexture(data);
+    // window.updateTexture( data );
+
+    yuvSurfaceShader.draw();
+  }
+
   var socket = io("ws://192.168.0.150:3000", {
     reconnectionDelayMax: 10000
   });
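The draw() helper added above splits a planar I420 frame: the Y plane is
stride * height bytes, and the 2x2-subsampled U and V planes are each a
quarter of that. A quick sanity check of the plane math for a concrete size:

// Assuming tightly packed I420 (stride === width, no filler rows):
var width = 320, height = 240;
var stride = width;
var lumaSize = stride * height;  // 76800 bytes of Y
var chromaSize = lumaSize >> 2;  // 19200 bytes each of U and V
// Total buffer length is width * height * 3 / 2:
console.log(lumaSize + 2 * chromaSize === (width * height * 3) / 2); // true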
@@ -2595,21 +2668,21 @@
   vDecoder.on("ready", function () {
     console.log("ready"); // test canvas
 
-    initWebGLCanvas();
-    vDecoder.fetch({
-      path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
-      range: [0, 66]
-    });
+    initWebGLCanvas(); // vDecoder.fetch({
+    //   path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+    //   range: [0, 66],
+    // });
+
     vDecoder.on("fetchDone", function (clip) {
       console.log("fetchDone", clip);
     }); // listen for decodeData
 
     vDecoder.on("decodeData", function (data) {
       // console.log("decodeData", data);
-      data.width;
-      data.height;
-      data.data; // draw(new Uint8Array(buffer), width, height);
-      // window.updateTexture( new Uint8Array(buffer) );
+      var width = data.width,
+          height = data.height,
+          buffer = data.data;
+      draw(new Uint8Array(buffer), width, height); // window.updateTexture( new Uint8Array(buffer) );
       // window.up
     });
     vDecoder.on("decodeDone", /*#__PURE__*/function () {
@@ -2637,6 +2710,91 @@
       };
     }());
   });
+  var rtc = new RTCPeerConnection();
+  socket.on("offer", /*#__PURE__*/function () {
+    var _ref2 = _asyncToGenerator( /*#__PURE__*/regenerator.mark(function _callee2(data) {
+      var offer, answer;
+      return regenerator.wrap(function _callee2$(_context2) {
+        while (1) {
+          switch (_context2.prev = _context2.next) {
+            case 0:
+              offer = new RTCSessionDescription({
+                sdp: data.sdp,
+                type: data.type
+              });
+              console.log("offer", offer);
+              rtc.setRemoteDescription(offer);
+              _context2.next = 6;
+              return rtc.createAnswer();
+
+            case 6:
+              answer = _context2.sent;
+              console.log("send-answer", answer);
+              rtc.setLocalDescription(answer);
+              socket.emit("answer", JSON.stringify(answer));
+
+            case 10:
+            case "end":
+              return _context2.stop();
+          }
+        }
+      }, _callee2);
+    }));
+
+    return function (_x2) {
+      return _ref2.apply(this, arguments);
+    };
+  }());
+  socket.on("candidate", function (data) {
+    if (/172\./.test(data.candidate)) return;
+    var candidate = new RTCIceCandidate(data);
+    rtc.addIceCandidate(candidate);
+    console.log("candidate", candidate);
+  });
+
+  rtc.ondatachannel = function (data) {
+    console.log("DataChannel from ", data);
+    var inputChannel = data.channel;
+    var id = 0; // hoisted out of onmessage so renderStateId actually increments per frame
+
+    inputChannel.onopen = function (data) {
+      console.warn("onopen", data);
+    };
+
+    inputChannel.onmessage = function (data) {
+      if (data.data) {
+        var h264Nal = new Uint8Array(data.data); // console.warn("onmessage", data);
+
+        vDecoder.worker.postMessage({
+          type: "decode",
+          data: h264Nal.buffer,
+          offset: h264Nal.byteOffset,
+          length: h264Nal.byteLength,
+          renderStateId: id
+        }, [h264Nal.buffer]);
+        id++;
+      }
+    };
+
+    inputChannel.onclose = function (data) {
+      console.warn("onclose", data);
+    };
+  };
+
+  console.log("rtc", rtc);
+
+  rtc.oniceconnectionstatechange = function (data) {
+    console.log("oniceconnectionstatechange", data);
+  };
+
+  rtc.onicegatheringstatechange = function (data) {
+    console.log("onicegatheringstatechange", data);
+  };
+
+  rtc.onicecandidate = function (data) {
+    console.log("onicecandidate", data);
+    socket.emit("ice_candidate", data.candidate);
+  };
 
 }));
 //# sourceMappingURL=video.js.map