
Merge branch 'master' of http://192.168.0.115:3000/xushiting/Metaverse

zhouenguang 3 years ago
parent
commit
662a87b990
8 changed files with 181 additions and 154 deletions
  1. dist/js/video.js (+31 -69)
  2. dist/js/video.js.map (+1 -1)
  3. dist/texture.html (+1 -0)
  4. src/ActionsHandler.js (+14 -52)
  5. src/Xverse_Room.js (+62 -1)
  6. src/h264Decoder/VDecoder.js (+55 -23)
  7. src/h264Decoder/index.js (+16 -7)
  8. src/video/test.js (+1 -1)

+ 31 - 69
dist/js/video.js

@@ -1,6 +1,6 @@
 /**
 * Name: Metaverse
-* Date: 2022/4/21
+* Date: 2022/4/24
 * Author: https://www.4dkankan.com
 * Copyright © 2022 4DAGE Co., Ltd. All rights reserved.
 * Licensed under the GLP license
@@ -2174,12 +2174,16 @@
             var id = _ref3.id;
             return id === _this4.decodingId;
           });
-          var fps = 1000 / (Date.now() - this.start) * clip.data.length;
-          console.log("Decoded ".concat(clip.data.length, " frames in ").concat(Date.now() - this.start, "ms @ ").concat(fps >> 0, "FPS"));
+
+          if (clip) {
+            var fps = 1000 / (Date.now() - this.start) * clip.data.length;
+            console.log("Decoded ".concat(clip.data.length, " frames in ").concat(Date.now() - this.start, "ms @ ").concat(fps >> 0, "FPS"));
+          }
+
           this.decoding = false;
           this.decodingId = null;
           tempId = 0;
-          this.emit("decodeDone", clip.id);
+          clip && clip.id && this.emit("decodeDone", clip.id);
         }
       }
     }, {
@@ -2566,65 +2570,25 @@
   }();
 
   var canvas = null;
-  var yuvSurfaceShader = null;
-  var yTexture = null;
-  var uTexture = null;
-  var vTexture = null;
 
   function initWebGLCanvas() {
     canvas = document.createElement("canvas");
     canvas.id = "test_canvas";
     canvas.style = "position: fixed;top:0;left: 0;z-index: 100;";
     var gl = canvas.getContext("webgl");
-    yuvSurfaceShader = YUVSurfaceShader.create(gl);
-    yTexture = Texture.create(gl, gl.LUMINANCE);
-    uTexture = Texture.create(gl, gl.LUMINANCE);
-    vTexture = Texture.create(gl, gl.LUMINANCE);
+    YUVSurfaceShader.create(gl);
+    Texture.create(gl, gl.LUMINANCE);
+    Texture.create(gl, gl.LUMINANCE);
+    Texture.create(gl, gl.LUMINANCE);
     document.body.append(canvas);
   }
 
-  function draw(buffer, width, height) {
-    canvas.width = width;
-    canvas.height = height; // the width & height returned are actually padded, so we have to use the frame size to get the real image dimension
-    // when uploading to texture
-
-    var stride = width; // stride
-    // height is padded with filler rows
-    // if we knew the size of the video before encoding, we could cut out the black filler pixels. We don't, so just set
-    // it to the size after encoding
-
-    var sourceWidth = width;
-    var sourceHeight = height;
-    var maxXTexCoord = sourceWidth / stride;
-    var maxYTexCoord = sourceHeight / height;
-    var lumaSize = stride * height;
-    var chromaSize = lumaSize >> 2;
-    var yBuffer = buffer.subarray(0, lumaSize);
-    var uBuffer = buffer.subarray(lumaSize, lumaSize + chromaSize);
-    var vBuffer = buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize); //   console.log("yBuffer", 1);
-
-    window.updateTexture(yBuffer);
-    var chromaHeight = height >> 1;
-    var chromaStride = stride >> 1; // we upload the entire image, including stride padding & filler rows. The actual visible image will be mapped
-    // from texture coordinates as to crop out stride padding & filler rows using maxXTexCoord and maxYTexCoord.
-
-    yTexture.image2dBuffer(yBuffer, stride, height);
-    uTexture.image2dBuffer(uBuffer, chromaStride, chromaHeight);
-    vTexture.image2dBuffer(vBuffer, chromaStride, chromaHeight);
-    yuvSurfaceShader.setTexture(yTexture, uTexture, vTexture);
-    yuvSurfaceShader.updateShaderData({
-      w: width,
-      h: height
-    }, {
-      maxXTexCoord,
-      maxYTexCoord
-    }); // debugger
-    // data = window.changeTexture(data);
-    // window.updateTexture( data );
-
-    yuvSurfaceShader.draw();
-  }
-
+  var socket = io("ws://192.168.0.150:3000", {
+    reconnectionDelayMax: 10000
+  });
+  socket.on("connect", function (data) {
+    console.log("socket connect");
+  });
   var vDecoder = new VDecoder({
     maxChip: 100
   });
@@ -2633,8 +2597,8 @@
 
     initWebGLCanvas();
     vDecoder.fetch({
-      path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/earth",
-      range: [8, 100]
+      path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+      range: [0, 66]
     });
     vDecoder.on("fetchDone", function (clip) {
       console.log("fetchDone", clip);
@@ -2642,10 +2606,11 @@
 
     vDecoder.on("decodeData", function (data) {
       // console.log("decodeData", data);
-      var width = data.width,
-          height = data.height,
-          buffer = data.data;
-      draw(new Uint8Array(buffer), width, height);
+      data.width;
+          data.height;
+          data.data; // draw(new Uint8Array(buffer), width, height);
+      // window.updateTexture( new Uint8Array(buffer) );
+      // window.up
     });
     vDecoder.on("decodeDone", /*#__PURE__*/function () {
       var _ref = _asyncToGenerator( /*#__PURE__*/regenerator.mark(function _callee(id) {
@@ -2653,16 +2618,13 @@
           while (1) {
             switch (_context.prev = _context.next) {
               case 0:
+                // vDecoder.fetch({
+                //   path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+                //   range: [0, 66],
+                // });
+                // console.log("clipId", clipId);
 
-                _context.next = 3;
-                return vDecoder.fetch({
-                  path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/14_test",
-                  range: [0, 28]
-                });
-
-              case 3:
-
-              case 4:
+              case 1:
               case "end":
                 return _context.stop();
             }
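
Reviewer note: the deleted draw() relied on the standard I420 (YUV 4:2:0) plane layout. A minimal sketch of that slicing, under the same assumptions the deleted code made (stride equal to width, no padding rows):

    // Slice a packed I420 buffer into its Y, U and V planes.
    function sliceI420(buffer, width, height) {
      const lumaSize = width * height;   // full-resolution Y plane
      const chromaSize = lumaSize >> 2;  // U and V are quarter-size planes
      return {
        y: buffer.subarray(0, lumaSize),
        u: buffer.subarray(lumaSize, lumaSize + chromaSize),
        v: buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize),
      };
    }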

+ 1 - 1
dist/js/video.js.map

The file diff has been suppressed because it is too large

+ 1 - 0
dist/texture.html

@@ -23,6 +23,7 @@
     <script src="./libs/babylonjs.serializers.min.js"></script>
     <script src="./libs/babylon.gui.min.js"></script>
     <script src="./libs/babylon.inspector.bundle.js"></script>
+    <script src="https://cdn.socket.io/4.4.1/socket.io.min.js"></script>
         <script src="js/video.js"></script>
         <style>
             html, body {
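
Reviewer note: the added CDN bundle defines a global io, which the new top-level socket code in js/video.js calls, so this script tag must load before the js/video.js tag below it. A minimal sketch of what the global enables (URL taken from the diff):

    // Assumes the socket.io 4.x client was loaded via the <script> tag above.
    const socket = io("ws://192.168.0.150:3000", { reconnectionDelayMax: 10000 });
    socket.on("connect", () => console.log("socket connect"));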

+ 14 - 52
src/ActionsHandler.js

@@ -8,6 +8,7 @@ import Logger from "./Logger.js"
 import Response from "./Response.js"
 import { VDecoder } from "./h264Decoder/VDecoder.js";
 
+window.currentFrame = null
 const logger = new Logger('actions-handler')
 const QueueActions = [Actions.Transfer, Actions.ChangeSkin, Actions.GetOnVehicle, Actions.GetOffVehicle];
 export default class ActionsHandler {
@@ -389,64 +390,25 @@ export default class ActionsHandler {
 
         let efns = Math.round(endRotation/6)
 
+        if(window.currentFrame == null){
+            window.currentFrame = efns
+        }
+        else if(window.currentFrame != efns){
+            window.currentFrame = efns
+        }
+        else if(window.currentFrame == efns){
+            return
+        }
+        window.camera_endRotation = endRotation
+        window.star_angle = angle
        console.log('Fetching frames: '+sfns+','+efns);
         this.fetchData({
             type:type,
             sFrame:sfns,
             eFrame:efns
         })
-
-        this.room.sceneManager.materialComponent.initreceveFrames()
-
-
-        let cameraPostion0 = util.xversePosition2Ue4(window.room.sceneManager.cameraComponent.mainCamera.position.clone())
-        let playerPosition = window.room.sceneManager.avatarComponent._mainUser.position
-
-        let rotationQuaternion = BABYLON.Quaternion.RotationAxis( new BABYLON.Vector3(0, 0, 1), angle )
-        let cameraPos = new BABYLON.Vector3( cameraPostion0.x, cameraPostion0.y, cameraPostion0.z )
-        let cameraCenter = new BABYLON.Vector3( playerPosition.x, playerPosition.y, playerPosition.z )
-        // console.error(cameraPos, cameraCenter)
-
-        cameraPos.rotateByQuaternionAroundPointToRef(rotationQuaternion, cameraCenter, cameraPos)
-
-        let cameraState = {
-            "position": {
-                "x": cameraPos.x,
-                "y": cameraPos.y,
-                "z": cameraPos.z
-            },
-            "angle": {
-                "pitch": 0,
-                "yaw": endRotation,
-                "roll": 0
-            }
-        }
-
-        let response = JSON.parse(JSON.stringify(Response))
-
-        response.signal.newUserStates[0].userId = nickname
-
-        response.signal.newUserStates[0].playerState.player.position.x = this.room.avatarManager.avatars.get(nickname).position.x
-        response.signal.newUserStates[0].playerState.player.position.y = this.room.avatarManager.avatars.get(nickname).position.y
-        response.signal.newUserStates[0].playerState.player.position.z = this.room.avatarManager.avatars.get(nickname).position.z
-
-        response.signal.newUserStates[0].playerState.player.angle.pitch = this.room.avatarManager.avatars.get(nickname).rotation.pitch
-        response.signal.newUserStates[0].playerState.player.angle.yaw = this.room.avatarManager.avatars.get(nickname).rotation.yaw
-        response.signal.newUserStates[0].playerState.player.angle.roll = this.room.avatarManager.avatars.get(nickname).rotation.roll
-
-        response.signal.newUserStates[0].playerState.camera.position.x = cameraState.position.x
-        response.signal.newUserStates[0].playerState.camera.position.y = cameraState.position.y
-        response.signal.newUserStates[0].playerState.camera.position.z = cameraState.position.z
         
-        response.signal.newUserStates[0].playerState.camera.angle.pitch = cameraState.angle.pitch
-        response.signal.newUserStates[0].playerState.camera.angle.yaw = cameraState.angle.yaw
-        response.signal.newUserStates[0].playerState.camera.angle.roll = cameraState.angle.roll
-
-        response.signal.newUserStates[0].playerState.cameraCenter.x = this.room.avatarManager.avatars.get(nickname).position.x
-        response.signal.newUserStates[0].playerState.cameraCenter.y = this.room.avatarManager.avatars.get(nickname).position.y
-        response.signal.newUserStates[0].playerState.cameraCenter.z = this.room.avatarManager.avatars.get(nickname).position.z
-
-        this.room.signal.handleSignal(response)
+        this.room.doRotate()
         //window.room.sceneManager.cameraComponent.mainCamera.rotation.y += angle
         //window.room.sceneManager.cameraComponent.setCameraPose(cameraState)
     }
@@ -504,7 +466,7 @@ export default class ActionsHandler {
     fetchData({sFrame: sfns, eFrame: efns}){
         if(window.workerReady){
             this.vDecoder.fetch({
-                path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/earth",
+                path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
                 range: [sfns, efns],
               });
         }
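
Reviewer note: the new three-branch guard on window.currentFrame assigns in the first two branches and only returns when the value is unchanged, so it reduces to a single comparison. A behavior-preserving sketch:

    // Skip the rotation fetch when the target end frame has not changed.
    if (window.currentFrame === efns) return;
    window.currentFrame = efns;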

+ 62 - 1
src/Xverse_Room.js

@@ -24,6 +24,7 @@ import EImageQuality from "./enum/EImageQuality.js"
 import Panorama from "./Panorama.js"
 import Debug from "./Debug.js"
 import Logger from "./Logger.js"
+import Response from "./Response.js"
 
 const logger = new Logger('xverse-room')
 export default class Xverse_Room extends EventEmitter {
@@ -87,8 +88,10 @@ export default class Xverse_Room extends EventEmitter {
                     if (s.stream) {
                         r || (r = !0,logger.info("Invoke updateRawYUVData"));
                         this.isUpdatedRawYUVData = !1;
-                        const fov = (l = this._currentState.skin) == null ? void 0 : l.fov;
+                        const fov = (l = this._currentState.skin) == null ? void 0 : l.fov;0
+                        
                         this.sceneManager.materialComponent.updateRawYUVData(s.stream, s.width, s.height, fov);
+                        
                         this.isUpdatedRawYUVData = !0
                     }
                     if(!e){
@@ -142,6 +145,64 @@ export default class Xverse_Room extends EventEmitter {
         })
     }
 
+    doRotate(){
+        if(typeof(window.star_angle) == 'undefined'){
+            return
+        }
+        let angle = window.star_angle
+        this.sceneManager.materialComponent.initreceveFrames()
+
+        let cameraPostion0 = util.xversePosition2Ue4(this.sceneManager.cameraComponent.mainCamera.position.clone())
+        let playerPosition = this.sceneManager.avatarComponent._mainUser.position
+
+        let rotationQuaternion = BABYLON.Quaternion.RotationAxis( new BABYLON.Vector3(0, 0, 1), angle )
+        let cameraPos = new BABYLON.Vector3( cameraPostion0.x, cameraPostion0.y, cameraPostion0.z )
+        let cameraCenter = new BABYLON.Vector3( playerPosition.x, playerPosition.y, playerPosition.z )
+        // console.error(cameraPos, cameraCenter)
+
+        cameraPos.rotateByQuaternionAroundPointToRef(rotationQuaternion, cameraCenter, cameraPos)
+
+        let cameraState = {
+            "position": {
+                "x": cameraPos.x,
+                "y": cameraPos.y,
+                "z": cameraPos.z
+            },
+            "angle": {
+                "pitch": 0,
+                "yaw": window.camera_endRotation,
+                "roll": 0
+            }
+        }
+
+        let response = JSON.parse(JSON.stringify(Response))
+
+        response.signal.newUserStates[0].userId = nickname
+
+        response.signal.newUserStates[0].playerState.player.position.x = this.avatarManager.avatars.get(nickname).position.x
+        response.signal.newUserStates[0].playerState.player.position.y = this.avatarManager.avatars.get(nickname).position.y
+        response.signal.newUserStates[0].playerState.player.position.z = this.avatarManager.avatars.get(nickname).position.z
+
+        response.signal.newUserStates[0].playerState.player.angle.pitch = this.avatarManager.avatars.get(nickname).rotation.pitch
+        response.signal.newUserStates[0].playerState.player.angle.yaw = this.avatarManager.avatars.get(nickname).rotation.yaw
+        response.signal.newUserStates[0].playerState.player.angle.roll = this.avatarManager.avatars.get(nickname).rotation.roll
+
+        response.signal.newUserStates[0].playerState.camera.position.x = cameraState.position.x
+        response.signal.newUserStates[0].playerState.camera.position.y = cameraState.position.y
+        response.signal.newUserStates[0].playerState.camera.position.z = cameraState.position.z
+        
+        response.signal.newUserStates[0].playerState.camera.angle.pitch = cameraState.angle.pitch
+        response.signal.newUserStates[0].playerState.camera.angle.yaw = cameraState.angle.yaw
+        response.signal.newUserStates[0].playerState.camera.angle.roll = cameraState.angle.roll
+
+        response.signal.newUserStates[0].playerState.cameraCenter.x = this.avatarManager.avatars.get(nickname).position.x
+        response.signal.newUserStates[0].playerState.cameraCenter.y = this.avatarManager.avatars.get(nickname).position.y
+        response.signal.newUserStates[0].playerState.cameraCenter.z = this.avatarManager.avatars.get(nickname).position.z
+        
+        console.log('Updating camera angle')
+        this.signal.handleSignal(response)
+    }
+
     get currentNetworkOptions() {
         return this._currentNetworkOptions
     }
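
Reviewer note: doRotate() repeats this.avatarManager.avatars.get(nickname) nine times while filling the response. A hoisted sketch, assuming the cloned Response template's position/angle objects carry only the keys copied here (nickname is the same, apparently global, identifier used above):

    const avatar = this.avatarManager.avatars.get(nickname);
    const state = response.signal.newUserStates[0].playerState;
    state.player.position = { x: avatar.position.x, y: avatar.position.y, z: avatar.position.z };
    state.player.angle = { pitch: avatar.rotation.pitch, yaw: avatar.rotation.yaw, roll: avatar.rotation.roll };
    state.camera.position = { ...cameraState.position };
    state.camera.angle = { ...cameraState.angle };
    state.cameraCenter = { x: avatar.position.x, y: avatar.position.y, z: avatar.position.z };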

+ 55 - 23
src/h264Decoder/VDecoder.js

@@ -48,10 +48,15 @@ export class VDecoder extends EventEmitter {
       switch (message.type) {
         case "pictureReady":
           //   onPictureReady(message);
+          console.log(
+            "[VDecoder]::decodeData",
+            Object.assign(message, { clipId: this.decodingId })
+          );
           this.emit(
             "decodeData",
             Object.assign(message, { clipId: this.decodingId })
           );
+
           if (this.decoding && this.decodingId) {
             this.decodeNext(this.decodingId);
           }
@@ -81,9 +86,22 @@ export class VDecoder extends EventEmitter {
       this.flush();
       console.log("flush");
     }
-    const rangeFetch = range(rangeArray[0], rangeArray[1] + 1);
-    // console.log("url", url);
-    // console.log("rangeFetch", rangeFetch);
+
+    let rangeFetch = [];
+
+    if (rangeArray[0] < 0 || rangeArray[1] < 0) {
+      console.error("[VDecoder]:range: invalid", `${[rangeArray[0], rangeArray[1]]}`);
+      return
+    }
+
+    if (rangeArray[0] < rangeArray[1]) {
+      rangeFetch = range(rangeArray[0], rangeArray[1] + 1);
+      console.log("[VDecoder]: clockwise +", rangeFetch);
+    } else {
+      rangeFetch = range(rangeArray[1], rangeArray[0] + 1).reverse();
+      console.log("[VDecoder]: counterclockwise -", rangeFetch);
+    }
+
     const allFetch = rangeFetch.map((i) => {
       return fetch(`${url}/${i}`).then((response) => {
         return response.arrayBuffer().then(function (buffer) {
@@ -92,18 +110,27 @@ export class VDecoder extends EventEmitter {
       });
     });
 
-    return Promise.all(allFetch).then((data) => {
-      const clip = { id: uuidv4(), data };
-      this.emit("fetchDone", clip);
-      this.cacheBuffer = data.slice();
-      this.tempVideos.push(clip);
-      if (decode) {
-        this.start = Date.now();
-        this.cacheBufferTotal = clip.data.length;
-        this.decodeNext(clip.id);
-      }
-      return Promise.resolve(clip);
-    });
+    return Promise.all(allFetch)
+      .then((data) => {
+        const clip = { id: uuidv4(), data: data };
+        if (data.length > 0) {
+          this.emit("fetchDone", clip);
+          this.cacheBuffer = data.slice();
+          this.tempVideos.push(clip);
+          console.log("[VDecoder]: fetched clip,", clip);
+          if (decode) {
+            this.start = Date.now();
+            this.cacheBufferTotal = clip.data.length;
+            this.decodeNext(clip.id);
+          }
+          return Promise.resolve(clip);
+        } else {
+          console.warn("[VDecoder]: fetch returned no frames", rangeFetch);
+        }
+      })
+      .catch((error) => {
+        console.log("error", error);
+      });
   }
   /**
    * @param {Uint8Array} h264Nal
@@ -130,18 +157,23 @@ export class VDecoder extends EventEmitter {
     if (nextFrame) {
       this.decode(nextFrame, tempId);
     } else {
+      console.log("tempVideos", this.tempVideos.length);
       const clip = this.tempVideos.find(({ id }) => id === this.decodingId);
-      const fps = (1000 / (Date.now() - this.start)) * clip.data.length;
-      console.log(
-        `Decoded ${clip.data.length} frames in ${Date.now() - this.start}ms @ ${
-          fps >> 0
-        }FPS`
-      );
+      if (clip) {
+        const fps = (1000 / (Date.now() - this.start)) * clip.data.length;
+        console.log(
+          `Decoded ${clip.data.length} frames in ${
+            Date.now() - this.start
+          }ms @ ${fps >> 0}FPS`
+        );
+      } else {
+        console.warn("clip does not exist");
+      }
 
       this.decoding = false;
-      this.decodingId = null;
+      // this.decodingId = null;
       tempId = 0;
-      this.emit("decodeDone", clip.id);
+      clip && clip.id && this.emit("decodeDone", clip.id);
     }
   }
   flush() {
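
Reviewer note: the rewritten fetch now supports descending ranges. In plain form, assuming range behaves like lodash's half-open range(start, end):

    // Inclusive frame order from a to b, in either direction.
    function frameOrder(a, b) {
      if (a < 0 || b < 0) throw new RangeError(`invalid range: ${[a, b]}`);
      return a < b
        ? range(a, b + 1)             // ascending, e.g. frameOrder(3, 5) -> [3, 4, 5]
        : range(b, a + 1).reverse();  // descending, e.g. frameOrder(5, 3) -> [5, 4, 3]
    }

Note also that decodingId is no longer reset in decodeNext (the reset is commented out), which callers of the decodeDone event may need to account for.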

+ 16 - 7
src/h264Decoder/index.js

@@ -5,6 +5,13 @@ import { initWebGLCanvas, draw } from "../video/test.js";
 
 // decoder
 
+const socket = io("ws://192.168.0.150:3000", {
+  reconnectionDelayMax: 10000,
+});
+socket.on("connect", (data) => {
+  console.log("socket connect");
+});
+
 const vDecoder = new VDecoder({
   maxChip: 100,
 });
@@ -15,8 +22,8 @@ vDecoder.on("ready", () => {
   initWebGLCanvas();
 
   vDecoder.fetch({
-    path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/earth",
-    range: [8, 100],
+    path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+    range: [0, 66],
   });
 
   vDecoder.on("fetchDone", (clip) => {
@@ -26,16 +33,18 @@ vDecoder.on("ready", () => {
   vDecoder.on("decodeData", (data) => {
     // console.log("decodeData", data);
     const { width, height, data: buffer } = data;
-    draw(new Uint8Array(buffer), width, height);
+    // draw(new Uint8Array(buffer), width, height);
+    // window.updateTexture( new Uint8Array(buffer) );
+    // window.up
   });
 
   vDecoder.on("decodeDone", async (id) => {
     let clipId = null;
     // vDecoder.flush();
-    clipId = await vDecoder.fetch({
-      path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/14_test",
-      range: [0, 28],
-    });
+    // vDecoder.fetch({
+    //   path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
+    //   range: [0, 66],
+    // });
     // console.log("clipId", clipId);
   });
 });
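
Reviewer note: the socket currently only logs on connect; no frame-related handlers are registered. A purely hypothetical sketch of wiring a server push to the decoder (the "frames" event name and payload shape are invented for illustration):

    socket.on("frames", ({ start, end }) => {
      vDecoder.fetch({
        path: "https://laser-data.oss-cn-shenzhen.aliyuncs.com/test-video/1011",
        range: [start, end],
      });
    });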

+ 1 - 1
src/video/test.js

@@ -46,7 +46,7 @@ function draw(buffer, width, height) {
     lumaSize + 2 * chromaSize
   );
 //   console.log("yBuffer", 1);
-  window.updateTexture(yBuffer);
+//   window.updateTexture(yBuffer);
 
   const chromaHeight = height >> 1;
   const chromaStride = stride >> 1;
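
Reviewer note: rather than commenting the call out, a defensive alternative keeps the hook behind a type check:

    // Only invoke the optional global hook when the host page defines it.
    if (typeof window.updateTexture === "function") {
      window.updateTexture(yBuffer);
    }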