Browse Source

feat:添加webRTC通讯、以及兼容性处理

leo 1 year ago
parent
commit
7bf35121ca
78 changed files with 15424 additions and 0 deletions
  1. 6 0
      static/screenIos/css/WXtrialInterface.css
  2. 13 0
      static/webRtcYJ/Makefile
  3. 632 0
      static/webRtcYJ/WXdraw.js
  4. 1991 0
      static/webRtcYJ/WXtrialInterface.html
  5. 130 0
      static/webRtcYJ/aac.c
  6. 1 0
      static/webRtcYJ/aac.js
  7. BIN
      static/webRtcYJ/aac.wasm
  8. 21 0
      static/webRtcYJ/build-4.1.1.sh
  9. 4 0
      static/webRtcYJ/buildFaad.sh
  10. 18 0
      static/webRtcYJ/build_decoder.sh
  11. 25 0
      static/webRtcYJ/buildffmpegWasm.sh
  12. 24 0
      static/webRtcYJ/buildwasm.sh
  13. 1022 0
      static/webRtcYJ/css/WXtrialInterface.css
  14. 133 0
      static/webRtcYJ/css/homePage.css
  15. 13 0
      static/webRtcYJ/css/swiper-bundle.min.css
  16. 14 0
      static/webRtcYJ/css/swiper-bundle.min.js
  17. 997 0
      static/webRtcYJ/decoder.c
  18. 175 0
      static/webRtcYJ/decoder.js
  19. 136 0
      static/webRtcYJ/ffmpegTest.c
  20. 1 0
      static/webRtcYJ/ffmpeghelper.js
  21. BIN
      static/webRtcYJ/ffmpeghelper.wasm
  22. 274 0
      static/webRtcYJ/helper.js
  23. BIN
      static/webRtcYJ/img/close.png
  24. BIN
      static/webRtcYJ/img/countdown.png
  25. BIN
      static/webRtcYJ/img/fenxiang_icon.png
  26. BIN
      static/webRtcYJ/img/fenxiang_icon@2x.png
  27. BIN
      static/webRtcYJ/img/goumai_icon(1).png
  28. BIN
      static/webRtcYJ/img/goumai_icon.png
  29. BIN
      static/webRtcYJ/img/goumai_icon@2x(1).png
  30. BIN
      static/webRtcYJ/img/goumai_icon@2x.png
  31. BIN
      static/webRtcYJ/img/guanbi_icon@2x.png
  32. BIN
      static/webRtcYJ/img/jia_bu_icon.png
  33. BIN
      static/webRtcYJ/img/jia_bu_icon@2x.png
  34. BIN
      static/webRtcYJ/img/jia_ke_icon.png
  35. BIN
      static/webRtcYJ/img/jia_ke_icon@2x.png
  36. BIN
      static/webRtcYJ/img/jian_bu_icon.png
  37. BIN
      static/webRtcYJ/img/jian_bu_icon@2x.png
  38. BIN
      static/webRtcYJ/img/jian_ke_icon.png
  39. BIN
      static/webRtcYJ/img/jian_ke_icon@2x.png
  40. BIN
      static/webRtcYJ/img/jianqieban_pic@2x.png
  41. BIN
      static/webRtcYJ/img/kefu_wei_icon.png
  42. BIN
      static/webRtcYJ/img/kefu_wei_icon@2x.png
  43. BIN
      static/webRtcYJ/img/kefu_xuanzhong_icon.png
  44. BIN
      static/webRtcYJ/img/kefu_xuanzhong_icon@2x.png
  45. BIN
      static/webRtcYJ/img/kefurexian_icon.png
  46. BIN
      static/webRtcYJ/img/kefurexian_icon@2x.png
  47. BIN
      static/webRtcYJ/img/kuorong_icon.png
  48. BIN
      static/webRtcYJ/img/kuorong_icon@2x.png
  49. BIN
      static/webRtcYJ/img/qq_icon.png
  50. BIN
      static/webRtcYJ/img/qq_icon@2x.png
  51. BIN
      static/webRtcYJ/img/shijian_icon.png
  52. BIN
      static/webRtcYJ/img/shijian_icon@2x.png
  53. BIN
      static/webRtcYJ/img/smallBell.png
  54. BIN
      static/webRtcYJ/img/wenzi_icon.png
  55. BIN
      static/webRtcYJ/img/wenzi_icon@2x.png
  56. BIN
      static/webRtcYJ/img/wode_wei_icon.png
  57. BIN
      static/webRtcYJ/img/wode_wei_icon@2x.png
  58. BIN
      static/webRtcYJ/img/wode_xuanzhong_icon.png
  59. BIN
      static/webRtcYJ/img/wode_xuanzhong_icon@2x.png
  60. BIN
      static/webRtcYJ/img/xiazai_icon.png
  61. BIN
      static/webRtcYJ/img/xiazai_icon@2x.png
  62. BIN
      static/webRtcYJ/img/xuankang_xuan_icon.png
  63. BIN
      static/webRtcYJ/img/xuankang_xuan_icon@2x.png
  64. BIN
      static/webRtcYJ/img/xuankuang_wei_icon.png
  65. BIN
      static/webRtcYJ/img/xuankuang_wei_icon@2x.png
  66. BIN
      static/webRtcYJ/img/yunshouji_wei_icon.png
  67. BIN
      static/webRtcYJ/img/yunshouji_wei_icon@2x.png
  68. BIN
      static/webRtcYJ/img/yunshouji_xuanzhong_icon.png
  69. BIN
      static/webRtcYJ/img/yunshouji_xuanzhong_icon@2x.png
  70. 4 0
      static/webRtcYJ/jquery-1.11.0.min.js
  71. 13 0
      static/webRtcYJ/jquery-weui.min.js
  72. 130 0
      static/webRtcYJ/pcm-player.js
  73. 1 0
      static/webRtcYJ/pcm-player.min.js
  74. 9103 0
      static/webRtcYJ/rtcEngine.min.js
  75. 209 0
      static/webRtcYJ/spsParser.js
  76. 99 0
      static/webRtcYJ/timer.js
  77. 148 0
      static/webRtcYJ/webgl.js
  78. 87 0
      static/webRtcYJ/websocket.js

+ 6 - 0
static/screenIos/css/WXtrialInterface.css

@@ -63,6 +63,12 @@ body:before {
   z-index: 0;
 }
 
+video {
+  display: block;
+  width: 100%;
+  height: 100%;
+  z-index: 999;
+}
 canvas {
   display: block;
   width: 100%;

+ 13 - 0
static/webRtcYJ/Makefile

@@ -0,0 +1,13 @@
+lib = -lfaad -lm 
+include = -I/home/MyDocumet/faadjs
+path = -L/usr/lib
+CC = gcc
+
+all: aac.o
+	$(CC) aac.o $(include) $(lib) $(path) -o aacTest
+
+aac.o:aac.c
+	echo "生成aac.o"
+	$(CC) aac.c -c $(include) -o aac.o
+clean:
+	rm *.o aacTest

+ 632 - 0
static/webRtcYJ/WXdraw.js

@@ -0,0 +1,632 @@
+// 蒙版
+var canvas_bak = document.getElementById('box');
+
+var winHeight = window.screen.height - window.innerHeight;
+var vowidth = window.screen.width;
+var topwinHeightDraw = window.screen.height - window.innerHeight + 30; //计算title top 头部
+var numse = window.screen.height; // -winHeight
+//计算title top 头部
+if (numse <= 70) {
+  var voheight = window.screen.height - winHeight - 34 - 20;
+} else {
+  var voheight = window.screen.height - topwinHeightDraw - 20;
+}
+
+//画笔大小
+var resolving = 1; // 1: 竖屏;2:横屏;
+var url = window.location.href;
+url = url.split('/');
+
+var parameters = GetRequest();
+
+var videoWidth, videoHeight;
+var isControl = false; // 是否是观看模式
+changIsControl(true);
+var isAuth = parameters['authPhone']; // 是否是获取的云手机
+var wsss;
+var errorTime = 0;
+var first = true;
+function changIsControl(value) {
+  isControl = value;
+  // if(){}
+  $('#open-set-phone-size-dialog-btn').attr('hidden', !value);
+}
+
+function throttle(fn, delay) {
+  var flag = true;
+  return () => {
+    if (!flag) return;
+    flag = false;
+    errorTime += delay;
+    timer = setTimeout(() => {
+      fn();
+      flag = true;
+    }, delay);
+  };
+}
+
+const throttleDoConnectDirectives = throttle(() => {
+  doConnectDirectives();
+}, 100);
+
+function doConnectDirectives() {
+  videoWidth = Number(resolvingPower) ? Number(resolvingPower) : 720;
+  videoHeight = videoWidth === 720 ? 1280 : 1920;
+  wsss = new WebSocket(cUrl);
+  wsss.binaryType = 'arraybuffer';
+
+  wsss.onopen = function () {
+    // 获取虚拟场景状态
+    errorTime = 0;
+    var pings = { type: 'getVsStatus' };
+    wsss.send(JSON.stringify(pings));
+    var bitRate = {
+      data: {
+        bitRate: 1243000,
+      },
+      type: 'bitRate',
+    };
+    wsss.send(JSON.stringify(bitRate));
+    // 进入发起询问
+    var pings2 = {
+      type: 'forwardMsg',
+      data: {
+        code: '3000',
+        userName: username,
+        desc: '询问是否有在控制', // 可选
+      },
+    };
+    wsss.send(JSON.stringify(pings2));
+
+    wsss.send(
+      JSON.stringify({
+        type: 'getPhoneSize',
+      }),
+    );
+  };
+  wsss.onerror = function (e) {
+    // console.log('🚀 ~ file: WXdraw.js ~ line 82 ~ onerror ~ e', e);
+    wsss.close(1006);
+    // throttle(doConnectDirectives, 100);
+    // if (errorTime > 1000) {
+    //   quit();
+    // }
+  };
+  wsss.onclose = function (e) {
+    // console.log('🚀 ~ file: WXdraw.js ~ line 93 ~ onclose ~ e', e);
+    // new WebSocket(e.)
+    // doConnectDirectives();
+    if (e.code === 1006) {
+      // 异常关闭,重连
+      throttleDoConnectDirectives();
+      // doConnectDirectives();
+      // throttle(doConnectDirectives, 100);
+      if (errorTime > 1000) {
+        quit();
+      }
+    }
+  };
+  wsss.onmessage = function (res) {
+    var result = typeof res.data === 'string' ? JSON.parse(res.data) : res.data;
+    console.log(
+      '🚀 ~ file: WXdraw.js ~ line 78 ~ doConnectDirectives ~ result',
+      result,
+    );
+
+    if (result.type === 'cutting') {
+      if (result.data.status === 0) {
+        $.toast('复制成功', 'text');
+      } else {
+        $.toast(result.msg, 'text');
+      }
+      return;
+    }
+    if (result.type === 'forwardMsgRep') {
+      // 当前云机无其他终端在线,获得控制权
+      changIsControl(true);
+    }
+    if (result.type === 'downAdnInstallRep') {
+      $.toast(result.data.msg, 'text', 4000);
+    }
+    if (result.type === 'forwardMsg') {
+      /**
+       * @type {boolean} isControl 当前是否拥有控制权,初始化时为false
+       * @type {string} isAuth 当前云机类型 - huo: 获取的云机,none: 自己的云机
+       * @type {string} username 当前登录的双子星账号username
+       */
+
+      if (result.data.userName !== username) {
+        switch (String(result.data.code)) {
+          case '3000': {
+            if (isControl) {
+              // 回复有控制
+              wsss.send(
+                JSON.stringify({
+                  type: 'forwardMsg',
+                  data: {
+                    code: '4000',
+                    userName: username,
+                    desc: '有控制', // 可选
+                  },
+                }),
+              );
+              return;
+            }
+            // 回复有观看
+            wsss.send(
+              JSON.stringify({
+                type: 'forwardMsg',
+                data: {
+                  code: '4100',
+                  userName: username,
+                  desc: '有观看', // 可选
+                },
+              }),
+            );
+            return;
+          }
+          case '4000': {
+            // console.log(
+            //   '🚀 ~ file: WXdraw.js ~ line 184 ~ doConnectDirectives ~ isAuth',
+            //   isAuth,
+            // );
+            // 当前是获取方
+            if (isAuth === 'huo' && isControl) {
+              $.confirm(
+                '授权方已收回控制权,您进入观看屏幕模式',
+                function () {
+                  //点击确认后的回调函数
+                  changIsControl(false);
+                },
+                function () {
+                  changIsControl(false);
+                  //点击取消后的回调函数
+                  quit();
+                },
+              );
+              return;
+            }
+
+            if (isAuth === 'shou') {
+              // 当前是授权方切没有控制权
+              $.confirm(
+                '当前云手机正在授控,是否请求获取云手机控制权?',
+                function () {
+                  //点击确认后的回调函数
+                  wsss.send(
+                    JSON.stringify({
+                      type: 'forwardMsg',
+                      data: {
+                        code: '5000',
+                        userName: username,
+                        desc: '控制权限收回', // 可选
+                      },
+                    }),
+                  );
+                  changIsControl(true);
+                },
+                function () {
+                  //点击取消后的回调函数
+                  changIsControl(false);
+                },
+              );
+
+              return;
+            }
+          }
+          case '5000': {
+            // if (result.data.username === username) {
+            //   changIsControl(true);
+            //   return;
+            // }
+            if (isAuth === 'huo' && isControl) {
+              $.confirm(
+                '授权方已收回控制权,您进入观看屏幕模式',
+                function () {
+                  //点击确认后的回调函数
+                  changIsControl(false);
+                },
+                function () {
+                  changIsControl(false);
+                  //点击取消后的回调函数
+                  quit();
+                },
+              );
+              return;
+            }
+
+            return;
+          }
+          default: {
+            return;
+          }
+        }
+      }
+      return;
+    }
+    if (result.type === 'payInitiateEvent') {
+      // var url = window.location.href;
+      // url = url.split('/');
+      // var baseUrl = url[0] + '//' + url[2];
+      $.ajax({
+        url: baseUrl + '/api/pay/third/order',
+        data: JSON.stringify({
+          orderNum: result.data.orderNum,
+          orderAmount: result.data.orderAmount,
+          appKey: result.data.appKey,
+        }),
+        type: 'POST',
+        dataType: 'json',
+        contentType: 'application/json;charset=UTF-8',
+        success: function (res) {
+          if (result.data.payType === 1) {
+            // 微信
+            if (window.__wxjs_environment === 'miniprogram') {
+              // 小程序
+              // copyUrl(result.data.payUrl);
+            } else {
+              window.location.href = result.data.payUrl;
+            }
+          } else {
+            window.location.href = result.data.payUrl;
+          }
+        },
+      });
+      return;
+    }
+    if (result.type === 'getPhoneSize' || result.type === 'setPhoneSize') {
+      // console.log(result);
+      if (
+        window.currentPhoneSize &&
+        (window.currentPhoneSize.width !==
+          Math.min(result.data.width, result.data.height) ||
+          window.currentPhoneSize.height !==
+            Math.max(result.data.width, result.data.height) ||
+          window.currentPhoneSize.dpi !== result.data.dpi)
+      ) {
+        // 获取到的分辨率与当前分辨率不符
+
+        const data = window.phoneSizeList.find(function (v) {
+          return (
+            v.width === Math.min(result.data.width, result.data.height) &&
+            v.height === Math.max(result.data.width, result.data.height) &&
+            v.dpi === result.data.dpi
+          );
+        });
+        window.currentPhoneSize = data || {
+          width: Math.min(result.data.width, result.data.height),
+          height: Math.max(result.data.width, result.data.height),
+          dpi: result.data.dpi,
+        };
+
+        if (result.type === 'setPhoneSize') {
+          lastSetPhone = Date.now();
+        }
+        // if (result.type === 'getPhoneSize') {
+        // 上报给后端
+        $.ajax({
+          url:
+            baseUrl +
+            '/api/resources/v5/machine/resolution/operationResolvingPower',
+          headers: {
+            Authorization: token,
+          },
+          type: 'post',
+          dataType: 'json',
+          contentType: 'application/json; charset=UTF-8',
+          data: JSON.stringify({
+            userCardId: window.userCardId,
+            resolvingPowerId: window.currentPhoneSize.id,
+          }),
+        });
+      }
+      // }
+
+      updateDB(db, storeName, {
+        id: userCardId,
+        socketURL: socketURL,
+        cUrl: cUrl,
+        cardToken: cardToken,
+        resolvingPower: resolvingPower,
+        width: window.currentPhoneSize.width,
+        height: window.currentPhoneSize.height,
+        dpi: window.currentPhoneSize.dpi,
+      });
+
+      return;
+    }
+
+    if (result.type === 'reProduceText') {
+      // 接收到云机剪贴板复制事件
+      // window.copyToClipboard(result.data.text);
+
+      if (navigator.clipboard) {
+        navigator.clipboard.writeText(result.data.text);
+      }
+    }
+  };
+}
+$('body').on('click', function () {
+  draw_graph('pencil', this);
+});
+//剪切板
+$('.upload').on('click', function () {
+  var texts = $(this).attr('data-text');
+  if (texts == 'uploads') {
+    $('.mainbox').css({
+      display: 'block',
+    });
+    $('.sbox').css({
+      display: 'none',
+    });
+  }
+});
+
+//home 控制home
+$('.botmat1img').on('click', function () {
+  var codes = $(this).attr('data-text');
+  if (codes == 'home' && isControl) {
+    wsss.send(ExexuteKeyBoard(3));
+  } else if (codes == 'return' && isControl) {
+    wsss.send(ExexuteKeyBoard(4));
+  } else if (codes == 'gengduo' && isControl) {
+    wsss.send(ExexuteKeyBoard(187));
+  }
+});
+// 高清控制
+$('.PictureQuality').on('click', function () {
+  if (!isControl) {
+    return;
+  }
+  $(this).addClass('avit').siblings().removeClass('avit');
+  var id = $(this).attr('data-id');
+  var cmd = {
+    type: 'switchSharpness',
+  };
+
+  decodeWoker.postMessage(cmd); //通知解码器worker切换分辨率
+  var buffer = makeSharpness(Number(id));
+  webSocketWorker.postMessage(buffer);
+});
+var canDraw = false;
+//画图形
+var draw_graph = function (graphType) {
+  //把蒙版放于画板上面
+  $('#container').css('z-index', 30);
+  $('#dedit').css('z-index', 20);
+  // 先画在蒙版上 再复制到画布上
+  //鼠标按下获取 开始xy开始画图
+  // var ongoingTouches = [];
+  var touchstart = function (e) {
+    // console.log('🚀 ~ file: WXdraw.js ~ line 244 ~ touchstart ~ e', e);
+
+    if (!isControl) {
+      return;
+    }
+    const action = 0;
+    Array.from(e.originalEvent.changedTouches).forEach(function (item, index) {
+      const x = item.clientX - item.target.getBoundingClientRect().x;
+      const y = item.clientY - item.target.getBoundingClientRect().y;
+      return wsss.send(
+        JSON.stringify({
+          type: 'event',
+          data: {
+            action,
+            count: e.originalEvent.touches.length,
+            pointerId: item.identifier,
+            x: (function () {
+              return (
+                resolving
+                  ? x * (window.currentPhoneSize.width / vowidth)
+                  : y * (window.currentPhoneSize.height / voheight)
+              ).toFixed(2);
+            })(),
+            y: (function () {
+              return (
+                resolving
+                  ? y * (window.currentPhoneSize.height / voheight)
+                  : (vowidth - x) * (window.currentPhoneSize.width / vowidth)
+              ).toFixed(2);
+            })(),
+          },
+        }),
+      );
+    });
+    canDraw = true;
+  };
+
+  //鼠标离开 把蒙版canvas的图片生成到canvas中
+  var touchend = function (e) {
+    if (!isControl) {
+      return;
+    }
+    const action = 1;
+    Array.from(e.originalEvent.changedTouches).forEach(function (item, index) {
+      const x = item.clientX - item.target.getBoundingClientRect().x;
+      const y = item.clientY - item.target.getBoundingClientRect().y;
+      return wsss.send(
+        JSON.stringify({
+          type: 'event',
+          data: {
+            action,
+            count: e.originalEvent.touches.length,
+            pointerId: item.identifier,
+            x: (function () {
+              return (
+                resolving
+                  ? x * (window.currentPhoneSize.width / vowidth)
+                  : y * (window.currentPhoneSize.height / voheight)
+              ).toFixed(2);
+            })(),
+            y: (function () {
+              return (
+                resolving
+                  ? y * (window.currentPhoneSize.height / voheight)
+                  : (vowidth - x) * (window.currentPhoneSize.width / vowidth)
+              ).toFixed(2);
+            })(),
+          },
+        }),
+      );
+    });
+    // var touchfor = e.originalEvent.changedTouches; //for 的手指数组
+    // //是否横屏
+    // for (var i = 0; i < touchfor.length; i++) {
+    //   // var acrossWidthX = touchfor[i].pageY * (videoHeight / voheight);
+    //   // var acrossHeightY =
+    //   //   videoWidth - touchfor[i].pageX * (videoWidth / vowidth);
+    //   // if (resolving) {
+    //   //   var verticalWidthX =
+    //   //     touchfor[i].pageX * (window.currentPhoneSize.width / vowidth);
+    //   //   var verticalHeightY =
+    //   //     touchfor[i].pageY * (window.currentPhoneSize.height / voheight);
+    //   // } else {
+    //   //   var verticalWidthX =
+    //   //     touchfor[i].pageX * (window.currentPhoneSize.width / vowidth);
+    //   //   var verticalHeightY =
+    //   //     touchfor[i].pageY * (window.currentPhoneSize.height / voheight);
+    //   // }
+    //   var ping =
+    //     // resolving == 0 ?
+    //     //   { "data": { "action": 1, "count": ongoingTouches.length, "pointerId": touchfor[i].identifier, "x": acrossWidthX.toFixed(2), "y": acrossHeightY.toFixed(2) }, "type": "event" } :
+    //     {
+    //       data: {
+    //         action: 1,
+    //         count: ongoingTouches.length,
+    //         pointerId: touchfor[i].identifier,
+    //         x: (() =>
+    //           (resolving
+    //             ? touchfor[i].pageX * (window.currentPhoneSize.width / vowidth)
+    //             : touchfor[i].pageY * (window.currentPhoneSize.width / voheight)
+    //           ).toFixed(2))(),
+    //         y: (() =>
+    //           (resolving
+    //             ? touchfor[i].pageY *
+    //               (window.currentPhoneSize.height / voheight)
+    //             : (vowidth - touchfor[i].pageX) *
+    //               (window.currentPhoneSize.height / vowidth)
+    //           ).toFixed(2))(),
+    //       },
+    //       type: 'event',
+    //     };
+    //   wsss.send(JSON.stringify(ping));
+    //   ongoingTouches.forEach(function (item, index) {
+    //     if (item.identifier === touchfor[i].identifier) {
+    //       ongoingTouches.splice(index, 1);
+    //     }
+    //   });
+    // }
+    canDraw = false;
+  };
+
+  //清空层 云手机超出屏幕的开关
+  var clearContext = function () {
+    canDraw = false;
+  };
+
+  // 鼠标移动
+  var touchmove = function (e) {
+    e.preventDefault();
+    if (!isControl) {
+      return;
+    }
+    const action = 2;
+    Array.from(e.originalEvent.changedTouches).forEach(function (item, index) {
+      const x = item.clientX - item.target.getBoundingClientRect().x;
+      const y = item.clientY - item.target.getBoundingClientRect().y;
+      return wsss.send(
+        JSON.stringify({
+          type: 'event',
+          data: {
+            action,
+            count: e.originalEvent.touches.length,
+            pointerId: item.identifier,
+            x: (function () {
+              return (
+                resolving
+                  ? x * (window.currentPhoneSize.width / vowidth)
+                  : y * (window.currentPhoneSize.height / voheight)
+              ).toFixed(2);
+            })(),
+            y: (function () {
+              return (
+                resolving
+                  ? y * (window.currentPhoneSize.height / voheight)
+                  : (vowidth - x) * (window.currentPhoneSize.width / vowidth)
+              ).toFixed(2);
+            })(),
+          },
+        }),
+      );
+    });
+    // var touchfor = e.originalEvent.targetTouches; //for 的手指数组
+    // for (var i = 0; i < touchfor.length; i++) {
+    //   // var acrossWidthX = touchfor[i].pageY * (videoHeight / voheight);
+    //   // var acrossHeightY =
+    //   //   videoWidth - touchfor[i].pageX * (videoWidth / vowidth);
+
+    //   // let verticalWidthX = 0;
+    //   // let verticalHeightY = 0;
+    //   // if (resolving) {
+    //   //   verticalWidthX =
+    //   //     touchfor[i].pageX * (window.currentPhoneSize.width / vowidth);
+    //   //   verticalHeightY =
+    //   //     touchfor[i].pageY * (window.currentPhoneSize.height / voheight);
+    //   // } else {
+    //   //   verticalWidthX =
+    //   //     touchfor[i].pageX * (window.currentPhoneSize.width / vowidth);
+    //   //   verticalHeightY =
+    //   //     touchfor[i].pageY * (window.currentPhoneSize.height / voheight);
+    //   // }
+    //   var ping =
+    //     // resolving == 0 ?
+    //     //   { "data": { "action": 2, "count": touchfor.length, "pointerId": touchfor[i].identifier, "x": acrossWidthX.toFixed(2), "y": acrossHeightY.toFixed(2) }, "type": "event" } :
+    //     {
+    //       data: {
+    //         action: 2,
+    //         count: touchfor.length,
+    //         pointerId: touchfor[i].identifier,
+    //         x: (() =>
+    //           (resolving
+    //             ? touchfor[i].pageX * (window.currentPhoneSize.width / vowidth)
+    //             : touchfor[i].pageY * (window.currentPhoneSize.width / voheight)
+    //           ).toFixed(2))(),
+    //         y: (() =>
+    //           (resolving
+    //             ? touchfor[i].pageY *
+    //               (window.currentPhoneSize.height / voheight)
+    //             : (vowidth - touchfor[i].pageX) *
+    //               (window.currentPhoneSize.height / vowidth)
+    //           ).toFixed(2))(),
+    //       },
+    //       type: 'event',
+    //     };
+    //   wsss.send(JSON.stringify(ping));
+    // }
+  };
+
+  //鼠标离开区域以外 除了涂鸦 都清空
+  var mouseout = function () {
+    if (graphType != 'handwriting') {
+      clearContext();
+    }
+  };
+  $(canvas_bak).off();
+  $(canvas_bak).on('touchstart', touchstart);
+  $(canvas_bak).on('touchmove', touchmove);
+  $(canvas_bak).on('touchend', touchend);
+  $(canvas_bak).on('mouseout', mouseout);
+};
+
+function GetRequest() {
+  var url = location.search; // 获取url中"?"符后的字串
+  var obj = new Object();
+  if (url.indexOf('?') != -1) {
+    var str = url.substr(1);
+    strs = str.split('&');
+    for (var i = 0; i < strs.length; i++) {
+      obj[strs[i].split('=')[0]] = strs[i].split('=')[1];
+    }
+  }
+  return obj;
+}

File diff suppressed because it is too large
+ 1991 - 0
static/webRtcYJ/WXtrialInterface.html


+ 130 - 0
static/webRtcYJ/aac.c

@@ -0,0 +1,130 @@
+#include <memory.h>
+#include <stdlib.h>
+#include "faad.h"
+#include <stdbool.h>
+#include <string.h>
+#include <emscripten.h>
+#include <stdio.h>
+#include <sys/time.h>
+#include <sys/timeb.h>
+#include <unistd.h>
+
+bool hasInit = false;
+
+NeAACDecHandle decoder = 0;
+NeAACDecFrameInfo frame_info;
+
+void PrintArry(unsigned char *buffer, unsigned int size)
+{
+	int i;
+	char data[1024*1024];
+	
+	for(i = 0;i < size;i++)
+	{
+		data[i] = buffer[i];
+	}
+	
+	data[i + 1] = '\0';
+}
+
+int init_decoder(unsigned char* inBuffer, size_t size)
+{  
+    unsigned char channels;
+    unsigned long sampleRate;
+    
+    memset(&frame_info, 0, sizeof(frame_info));
+    decoder = NeAACDecOpen();
+    NeAACDecInit(decoder, inBuffer, size, &sampleRate, &channels);
+    //printf("init_decoder初始化完毕\n");
+    hasInit = true;
+    return 0;
+}
+
+int feedData(unsigned char* out_data, unsigned char* buffer, unsigned int size)
+{
+	int ret = 0;
+	
+    if (!hasInit)
+    {
+        init_decoder(buffer, size);
+    }
+
+    unsigned char *out_buffer = (unsigned char*)NeAACDecDecode(decoder, &frame_info, buffer, size);
+	//printf("frame_info.error %d\n",frame_info.error);
+
+    if (frame_info.error > 0)
+    {		
+        return frame_info.error;
+    }
+    else if(out_buffer && frame_info.samples > 0)//解码成功
+    {
+		ret = frame_info.samples * frame_info.channels;
+		for(int i = 0;i < ret;i++)
+		{
+			 out_data[i] = out_buffer[i];
+		}
+    }
+
+    return ret;
+}
+
+void destroyDecoder()
+{
+	hasInit = false;
+    NeAACDecClose(decoder);
+}
+
+/*bool GetFrame(FILE *file, unsigned char *input, int *len, int *pos)
+{
+    int readByte;
+    int frameLen = 0;
+    unsigned char buffer[6];
+
+    while ((readByte = fread(buffer, 1, 6, file)) > 0)
+    {
+        if ((buffer[0] == 0xff) && ((buffer[1] & 0xf0) == 0xf0))
+        {         
+            frameLen = ((buffer[3] & 0x3) << 11) | ((buffer[4]) << 3) | ((buffer[5]) >> 5);
+            printf("帧长度 %d\n", frameLen);
+            *len = frameLen;
+            fseek(file, *pos, SEEK_SET);
+            fread(input, 1, frameLen, file);
+            *pos = *pos + frameLen;
+            return true;
+        }
+        else
+        {
+            printf("位置没找对\n");
+        }
+    }
+
+    return false;
+}
+
+int main(int argc, char* argv[])
+{
+    int len;
+    int pos = 0;
+    unsigned char buffer[4096] = {0};
+    unsigned char OutBuffer[10240];
+    unsigned char* pcmData = OutBuffer;
+    FILE* file = fopen("test.aac", "rb");
+	if(!file)
+	{
+		printf("找不到AAC文件\n");
+		return -1;
+	}
+	
+    outFile = fopen("shchu.pcm", "wb+");
+
+    while (GetFrame(file, buffer, &len, &pos))
+    {
+        feed_data(pcmData, buffer, len);     
+    }
+
+    fclose(file);
+	fclose(outFile);
+	destroy_decoder();
+	printf("解码完毕\n");
+    return 0;
+}*/

File diff suppressed because it is too large
+ 1 - 0
static/webRtcYJ/aac.js


BIN
static/webRtcYJ/aac.wasm


+ 21 - 0
static/webRtcYJ/build-4.1.1.sh

@@ -0,0 +1,21 @@
+echo "Beginning Build:"
+rm -r dist
+mkdir -p dist
+cd ffmpeg-4.1.1
+echo "emconfigure"
+CPPFLAGS="-D_POSIX_C_SOURCE=200112 -D_XOPEN_SOURCE=600" \
+emconfigure ./configure --cc="emcc" \
+--prefix=$(pwd)/dist --enable-cross-compile --target-os=none --arch=x86_64 \
+--cpu=generic --disable-ffplay --disable-ffprobe --disable-ffserver \
+--disable-asm --disable-doc --disable-devices --disable-pthreads \
+--disable-w32threads --disable-network --disable-hwaccels \
+--disable-parsers --disable-bsfs --disable-debug --disable-protocols \
+--disable-indevs --disable-outdevs --enable-protocol=file
+if [ -f "Makefile" ]; then
+  echo "make clean"
+  make clean
+fi
+echo "make"
+make
+echo "make install"
+make install

+ 4 - 0
static/webRtcYJ/buildFaad.sh

@@ -0,0 +1,4 @@
+#cd /home/github/faad2-2_10_0
+. bootstrap
+emconfigure ./configure --prefix=/usr --enable-shared --without-xmms --without-drm --without-mpeg4ip
+emmake make

+ 18 - 0
static/webRtcYJ/build_decoder.sh

@@ -0,0 +1,18 @@
+echo "Beginning Build:"
+rm -r dist
+mkdir -p dist
+cd ffmpeg-4.2.2
+echo "emconfigure"
+emconfigure ./configure --cc="emcc" --cxx="em++" --ar="emar" --ranlib="emranlib" --prefix=$(pwd)/dist --enable-cross-compile --target-os=none \
+        --arch=x86_32 --cpu=generic --enable-gpl --enable-version3 --disable-avdevice --disable-swresample --disable-postproc --disable-avfilter \
+        --disable-programs --disable-logging --disable-everything --enable-avformat --enable-decoder=hevc --enable-decoder=h264 --enable-decoder=aac \
+        --disable-ffplay --disable-ffprobe --disable-ffserver --disable-asm --disable-doc --disable-devices --disable-network --disable-hwaccels \
+        --disable-parsers --disable-pthreads --disable-w32threads --disable-bsfs --disable-debug --enable-protocol=file --enable-demuxer=mov --enable-demuxer=flv --disable-indevs --disable-outdevs
+if [ -f "Makefile" ]; then
+  echo "make clean"
+  make clean
+fi
+echo "make"
+make
+echo "make install"
+make install

+ 25 - 0
static/webRtcYJ/buildffmpegWasm.sh

@@ -0,0 +1,25 @@
+export TOTAL_MEMORY=104857600
+export EXPORTED_FUNCTIONS="[ \
+    '_openDecoder', \
+    '_feedData', \
+    '_closeDecoder', \
+    '_malloc',\
+    '_free',\
+    '_main'
+]"
+#-lavcodec -lavformat -lavutil -lswresample -lswscale \
+
+echo "运行 Emscripten..."
+emcc  -I "./include" \
+-O3 \
+-s WASM=1 \
+-lavcodec -lavformat -lavutil  -lswscale \
+-s TOTAL_MEMORY=${TOTAL_MEMORY} \
+-L ./lib \
+-s ASSERTIONS=1 \
+-s EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS}" \
+ffmpegTest.c \
+-s RESERVED_FUNCTION_POINTERS=14 \
+-s ALLOW_MEMORY_GROWTH=1 \
+-o ffmpeghelper.js 
+echo "编译完成!"

+ 24 - 0
static/webRtcYJ/buildwasm.sh

@@ -0,0 +1,24 @@
+export TOTAL_MEMORY=10485760
+rm *.js *.wasm
+
+export EXPORTED_FUNCTIONS="[  	\
+	'_malloc' \
+	,'_free' \
+	,'_destroyDecoder' \
+	,'_feedData'   \
+]"
+
+export LIBRARY_FUNCTIONS="[\
+    'malloc', \
+    'free'	  \
+]"
+
+#
+emcc aac.c  \
+-O3 \
+-s WASM=1 \
+-I /usr/local -lfaad -lm -L/usr/local/lib  \
+-s TOTAL_MEMORY=${TOTAL_MEMORY} \
+-s DEFAULT_LIBRARY_FUNCS_TO_INCLUDE="${LIBRARY_FUNCTIONS}" \
+-s EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS}" \
+-o aac.js

File diff suppressed because it is too large
+ 1022 - 0
static/webRtcYJ/css/WXtrialInterface.css


+ 133 - 0
static/webRtcYJ/css/homePage.css

@@ -0,0 +1,133 @@
+* {
+	margin: 0;
+	padding: 0;
+}
+
+.heads {
+	padding: 40px 30px 0px 30px;
+	overflow: hidden;
+}
+
+.heads-left {
+	color: #3399FF;
+}
+
+.heads-right {
+	width: 60px;
+	height: 60px;
+}
+
+.heads-right image {
+	width: 100%;
+	height: 100%;
+}
+
+.left {
+	float: left;
+}
+
+.right {
+	float: right;
+}
+
+#wine {
+	width: 100%;
+	height: 100%;
+}
+
+.newhelp {
+	display: flex;
+	font-size: 12px;
+	font-family: PingFangSC-Regular, PingFang SC;
+	font-weight: 400;
+	color: #999999;
+	line-height: 17px;
+	align-items: center;
+	position: absolute;
+	top: 0;
+	left: 0;
+	margin-top: 10px;
+	margin-left: 15px;
+}
+
+.newhelp .font {
+	text-decoration: underline;
+	margin-left: 2px;
+}
+
+.newhelp .helpImg {
+	width: 0.875rem;
+	height: 0.875rem;
+}
+
+.newhelp .helpImg img {
+	width: 100%;
+	height: 100%;
+}
+
+.buyIcon {
+	position: absolute;
+	top: 0;
+	right: 0;
+	/* 	margin-top: 0.625rem;
+	margin-right: 0.625rem;
+	 */
+}
+
+.swiper-container {
+	width: 100%;
+	height: 100%;
+}
+
+.swiper-slide {
+	/* padding: 43px 53px; */
+	padding: 12% 14%;
+	box-sizing: border-box;
+	text-align: center;
+	font-size: 18px;
+	/* background: #fff; */
+	/* height: 667px; */
+	height: 100%;
+	/* Center slide text vertically */
+	display: -webkit-box;
+	display: -ms-flexbox;
+	display: -webkit-flex;
+	display: flex;
+	-webkit-box-pack: center;
+	-ms-flex-pack: center;
+	-webkit-justify-content: center;
+	justify-content: center;
+	-webkit-box-align: center;
+	-ms-flex-align: center;
+	-webkit-align-items: center;
+	align-items: center;
+}
+
+.swiper-button-next {
+	width: 15px;
+	height: 15px;
+	background: url(../../static/img/xia_icon.png);
+	right: 15px;
+}
+
+.swiper-button-prev {
+	width: 15px;
+	height: 15px;
+	background: url(../../static/img/shang_icon.png);
+	left: 15px;
+}
+
+.swiper-button-next:after,
+.swiper-container-rtl .swiper-button-prev:after {
+	content: "" !important;
+}
+
+.swiper-button-prev:after,
+.swiper-container-rtl .swiper-button-next:after {
+	content: "" !important;
+}
+
+.thl-time {
+	text-align: left;
+	margin-left: -0.4rem !important;
+}

File diff suppressed because it is too large
+ 13 - 0
static/webRtcYJ/css/swiper-bundle.min.css


File diff suppressed because it is too large
+ 14 - 0
static/webRtcYJ/css/swiper-bundle.min.js


+ 997 - 0
static/webRtcYJ/decoder.c

@@ -0,0 +1,997 @@
+#include <stdio.h>
+#include <sys/time.h>
+#include <sys/timeb.h>
+#include <unistd.h>
+
+/* Callbacks into the JavaScript side (function pointers handed over via openDecoder). */
+typedef void(*VideoCallback)(unsigned char *buff, int size, double timestamp);
+typedef void(*AudioCallback)(unsigned char *buff, int size, double timestamp);
+typedef void(*RequestCallback)(int offset, int available);
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "libavcodec/avcodec.h"
+#include "libavformat/avformat.h"
+#include "libavutil/fifo.h"
+//#include "libswscale/swscale.h"
+
+#define MIN(X, Y)  ((X) < (Y) ? (X) : (Y))
+
+/* Buffer sizing: custom AVIO read buffer, initial PCM scratch buffer,
+ * initial stream fifo size, and the fifo size that triggers a warning. */
+const int kCustomIoBufferSize = 32 * 1024;
+const int kInitialPcmBufferSize = 128 * 1024;
+const int kDefaultFifoSize = 1 * 1024 * 1024;
+const int kMaxFifoSize = 16 * 1024 * 1024;
+
+/* Error codes returned across the wasm boundary; 0 is success. */
+typedef enum ErrorCode {
+    kErrorCode_Success = 0,
+    kErrorCode_Invalid_Param,
+    kErrorCode_Invalid_State,
+    kErrorCode_Invalid_Data,
+    kErrorCode_Invalid_Format,
+    kErrorCode_NULL_Pointer,
+    kErrorCode_Open_File_Error,
+    kErrorCode_Eof,
+    kErrorCode_FFmpeg_Error,
+    kErrorCode_Old_Frame
+} ErrorCode;
+
+typedef enum LogLevel {
+    kLogLevel_None, //Not logging.
+    kLogLevel_Core, //Only logging core module(without ffmpeg).
+    kLogLevel_All   //Logging all, with ffmpeg.
+} LogLevel;
+
+/* All decoder state lives in this single struct (one global instance). */
+typedef struct WebDecoder {
+    AVFormatContext *avformatContext;
+    AVCodecContext *videoCodecContext;
+    AVCodecContext *audioCodecContext;
+    AVFrame *avFrame;                   // reused for every decoded frame
+    int videoStreamIdx;
+    int audioStreamIdx;
+    VideoCallback videoCallback;
+    AudioCallback audioCallback;
+    RequestCallback requestCallback;
+    unsigned char *yuvBuffer;           // packed YUV420P output for one frame
+    //unsigned char *rgbBuffer;
+    unsigned char *pcmBuffer;           // interleaved PCM output, grown on demand
+    int currentPcmBufferSize;
+    int videoBufferSize;                // 3 * videoSize (see openDecoder)
+    int videoSize;                      // bytes of one YUV420P picture
+    //struct SwsContext* swsCtx;
+    unsigned char *customIoBuffer;      // owned by the AVIOContext after avio_alloc_context
+    FILE *fp;                           // file mode: temp download cache
+    char fileName[64];
+    int64_t fileSize;
+    int64_t fileReadPos;                // demuxer read cursor into fp
+    int64_t fileWritePos;               // download write cursor into fp
+    int64_t lastRequestOffset;
+    double beginTimeOffset;             // seek target in seconds, for accurate-seek frame dropping
+    int accurateSeek;
+    // For streaming.
+    int isStream;                       // 1 = fifo-backed live stream, 0 = file-backed
+    AVFifoBuffer *fifo;
+    int fifoSize;
+} WebDecoder;
+
+WebDecoder *decoder = NULL;
+LogLevel logLevel = kLogLevel_None;
+
+// Forward declaration; the misspelling ("Aailable") is part of the public surface.
+int getAailableDataSize();
+
+unsigned long getTickCount() {
+    struct timespec ts;
+    clock_gettime(CLOCK_MONOTONIC, &ts);
+    return ts.tv_sec * (unsigned long)1000 + ts.tv_nsec / 1000000;
+}
+
+/* printf-style logger with a "[time][Core][DT]" prefix; no-op when logging is
+ * disabled. Output goes to stdout (the browser console under Emscripten). */
+void simpleLog(const char* format, ...) {
+    if (logLevel == kLogLevel_None) {
+        return;
+    }
+
+    char szBuffer[1024] = { 0 };
+    char szTime[32]		= { 0 };
+    char *p				= NULL;
+    int prefixLength	= 0;
+    const char *tag		= "Core";
+    struct tm tmTime;
+    struct timeb tb;
+
+    ftime(&tb);
+    localtime_r(&tb.time, &tmTime);
+
+    if (1) {
+        int tmYear		= tmTime.tm_year + 1900;
+        int tmMon		= tmTime.tm_mon + 1;
+        int tmMday		= tmTime.tm_mday;
+        int tmHour		= tmTime.tm_hour;
+        int tmMin		= tmTime.tm_min;
+        int tmSec		= tmTime.tm_sec;
+        int tmMillisec	= tb.millitm;
+        sprintf(szTime, "%d-%d-%d %d:%d:%d.%d", tmYear, tmMon, tmMday, tmHour, tmMin, tmSec, tmMillisec);
+    }
+
+    prefixLength = sprintf(szBuffer, "[%s][%s][DT] ", szTime, tag);
+    p = szBuffer + prefixLength;
+
+    // vsnprintf truncates silently if the formatted message exceeds the buffer.
+    if (1) {
+        va_list ap;
+        va_start(ap, format);
+        vsnprintf(p, 1024 - prefixLength, format, ap);
+        va_end(ap);
+    }
+
+    printf("%s\n", szBuffer);
+}
+
+void ffmpegLogCallback(void* ptr, int level, const char* fmt, va_list vl) {
+    static int printPrefix	= 1;
+    static int count		= 0;
+    static char prev[1024]	= { 0 };
+    char line[1024]			= { 0 };
+    static int is_atty;
+    AVClass* avc = ptr ? *(AVClass**)ptr : NULL;
+    if (level > AV_LOG_DEBUG) {
+        return;
+    }
+
+    line[0] = 0;
+
+    if (printPrefix && avc) {
+        if (avc->parent_log_context_offset) {
+            AVClass** parent = *(AVClass***)(((uint8_t*)ptr) + avc->parent_log_context_offset);
+            if (parent && *parent) {
+                snprintf(line, sizeof(line), "[%s @ %p] ", (*parent)->item_name(parent), parent);
+            }
+        }
+        snprintf(line + strlen(line), sizeof(line) - strlen(line), "[%s @ %p] ", avc->item_name(ptr), ptr);
+    }
+
+    vsnprintf(line + strlen(line), sizeof(line) - strlen(line), fmt, vl);
+    line[strlen(line) + 1] = 0;
+    simpleLog("%s", line);
+}
+
+int openCodecContext(AVFormatContext *fmtCtx, enum AVMediaType type, int *streamIdx, AVCodecContext **decCtx) {
+    int ret = 0;
+    do {
+        int streamIndex		= -1;
+        AVStream *st		= NULL;
+        AVCodec *dec		= NULL;
+        AVDictionary *opts	= NULL;
+
+        ret = av_find_best_stream(fmtCtx, type, -1, -1, NULL, 0);
+        if (ret < 0) {
+            simpleLog("Could not find %s stream.", av_get_media_type_string(type));
+            break;
+        }
+
+        streamIndex = ret;
+        st = fmtCtx->streams[streamIndex];
+
+        dec = avcodec_find_decoder(st->codecpar->codec_id);
+        if (!dec) {
+            simpleLog("Failed to find %s codec %d.", av_get_media_type_string(type), st->codecpar->codec_id);
+            ret = AVERROR(EINVAL);
+            break;
+        }
+
+        *decCtx = avcodec_alloc_context3(dec);
+        if (!*decCtx) {
+            simpleLog("Failed to allocate the %s codec context.", av_get_media_type_string(type));
+            ret = AVERROR(ENOMEM);
+            break;
+        }
+
+        if ((ret = avcodec_parameters_to_context(*decCtx, st->codecpar)) != 0) {
+            simpleLog("Failed to copy %s codec parameters to decoder context.", av_get_media_type_string(type));
+            break;
+        }
+
+        av_dict_set(&opts, "refcounted_frames", "0", 0);
+
+        if ((ret = avcodec_open2(*decCtx, dec, NULL)) != 0) {
+            simpleLog("Failed to open %s codec.", av_get_media_type_string(type));
+            break;
+        }
+
+        *streamIdx = streamIndex;
+        avcodec_flush_buffers(*decCtx);
+    } while (0);
+
+    return ret;
+}
+
+void closeCodecContext(AVFormatContext *fmtCtx, AVCodecContext *decCtx, int streamIdx) {
+    do {
+        if (fmtCtx == NULL || decCtx == NULL) {
+            break;
+        }
+
+        if (streamIdx < 0 || streamIdx >= fmtCtx->nb_streams) {
+            break;
+        }
+
+        fmtCtx->streams[streamIdx]->discard = AVDISCARD_ALL;
+        avcodec_close(decCtx);
+    } while (0);
+}
+
+ErrorCode copyYuvData(AVFrame *frame, unsigned char *buffer, int width, int height) {
+    ErrorCode ret		= kErrorCode_Success;
+    unsigned char *src	= NULL;
+    unsigned char *dst	= buffer;
+    int i = 0;
+    do {
+        if (frame == NULL || buffer == NULL) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        if (!frame->data[0] || !frame->data[1] || !frame->data[2]) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        for (i = 0; i < height; i++) {
+            src = frame->data[0] + i * frame->linesize[0];
+            memcpy(dst, src, width);
+            dst += width;
+        }
+
+        for (i = 0; i < height / 2; i++) {
+            src = frame->data[1] + i * frame->linesize[1];
+            memcpy(dst, src, width / 2);
+            dst += width / 2;
+        }
+
+        for (i = 0; i < height / 2; i++) {
+            src = frame->data[2] + i * frame->linesize[2];
+            memcpy(dst, src, width / 2);
+            dst += width / 2;
+        }
+    } while (0);
+    return ret;	
+}
+
+/*
+ErrorCode yuv420pToRgb32(unsigned char *yuvBuff, unsigned char *rgbBuff, int width, int height) {
+    ErrorCode ret = kErrorCode_Success;
+    AVPicture yuvPicture, rgbPicture;
+    uint8_t *ptmp = NULL;
+    do {
+        if (yuvBuff == NULL || rgbBuff == NULL) {
+            ret = kErrorCode_Invalid_Param
+            break;
+        }
+
+        if (decoder == NULL || decoder->swsCtx == NULL) {
+            ret = kErrorCode_Invalid_Param
+            break;
+        }
+
+        
+        avpicture_fill(&yuvPicture, yuvBuff, AV_PIX_FMT_YUV420P, width, height);
+        avpicture_fill(&rgbPicture, rgbBuff, AV_PIX_FMT_RGB32, width, height);
+
+        ptmp = yuvPicture.data[1];
+        yuvPicture.data[1] = yuvPicture.data[2];
+        yuvPicture.data[2] = ptmp;
+
+        sws_scale(decoder->swsCtx, yuvPicture.data, yuvPicture.linesize, 0, height, rgbPicture.data, rgbPicture.linesize);
+    } while (0);
+    return ret;
+}
+*/
+
/*
 * Round numToRound up to the next multiple of `multiple`.
 *
 * The original bit trick `(n + m - 1) & -m` is only correct when m is a
 * power of two; this arithmetic form works for any positive multiple while
 * producing identical results for the power-of-two case used by the PCM
 * buffer sizing. A non-positive multiple returns the input unchanged.
 */
int roundUp(int numToRound, int multiple) {
    if (multiple <= 0) {
        return numToRound;
    }

    int remainder = numToRound % multiple;
    if (remainder == 0) {
        return numToRound;
    }
    return numToRound + multiple - remainder;
}
+
+/* Deliver one decoded video frame to JavaScript: validate state, require
+ * YUV420P, pack the planes into decoder->yuvBuffer, convert pts to seconds,
+ * and invoke the video callback. During an accurate seek, frames earlier
+ * than the seek target are dropped with kErrorCode_Old_Frame. */
+ErrorCode processDecodedVideoFrame(AVFrame *frame) {
+    ErrorCode ret = kErrorCode_Success;
+    double timestamp = 0.0f;
+    do {
+        if (frame == NULL ||
+            decoder->videoCallback == NULL ||
+            decoder->yuvBuffer == NULL ||
+            decoder->videoBufferSize <= 0) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        // Only YUV420P is supported by the downstream WebGL renderer.
+        if (decoder->videoCodecContext->pix_fmt != AV_PIX_FMT_YUV420P) {
+            simpleLog("Not YUV420P, but unsupported format %d.", decoder->videoCodecContext->pix_fmt);
+            ret = kErrorCode_Invalid_Format;
+            break;
+        }
+
+        ret = copyYuvData(frame, decoder->yuvBuffer, decoder->videoCodecContext->width, decoder->videoCodecContext->height);
+        if (ret != kErrorCode_Success) {
+            break;
+        }
+
+        /*
+        ret = yuv420pToRgb32(decoder->yuvBuffer, decoder->rgbBuffer, decoder->videoCodecContext->width, decoder->videoCodecContext->height);
+        if (ret != kErrorCode_Success) {
+            break;
+        }
+        */
+
+        // Presentation time in seconds, derived from the stream time base.
+        timestamp = (double)frame->pts * av_q2d(decoder->avformatContext->streams[decoder->videoStreamIdx]->time_base);
+
+        if (decoder->accurateSeek && timestamp < decoder->beginTimeOffset) {
+            //simpleLog("video timestamp %lf < %lf", timestamp, decoder->beginTimeOffset);
+            ret = kErrorCode_Old_Frame;
+            break;
+        }
+        decoder->videoCallback(decoder->yuvBuffer, decoder->videoSize, timestamp);
+    } while (0);
+    return ret;
+}
+
+/* Deliver one decoded audio frame to JavaScript: interleave the samples into
+ * decoder->pcmBuffer (lazily allocated, grown when a frame is larger), convert
+ * pts to seconds, and invoke the audio callback. During an accurate seek,
+ * frames earlier than the seek target are dropped with kErrorCode_Old_Frame. */
+ErrorCode processDecodedAudioFrame(AVFrame *frame) {
+    ErrorCode ret       = kErrorCode_Success;
+    int sampleSize      = 0;
+    int audioDataSize   = 0;
+    int targetSize      = 0;
+    int offset          = 0;
+    int i               = 0;
+    int ch              = 0;
+    double timestamp    = 0.0f;
+    do {
+        if (frame == NULL) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        sampleSize = av_get_bytes_per_sample(decoder->audioCodecContext->sample_fmt);
+        if (sampleSize < 0) {
+            simpleLog("Failed to calculate data size.");
+            ret = kErrorCode_Invalid_Data;
+            break;
+        }
+
+        // First frame: allocate the PCM scratch buffer once.
+        if (decoder->pcmBuffer == NULL) {
+            decoder->pcmBuffer = (unsigned char*)av_mallocz(kInitialPcmBufferSize);
+            decoder->currentPcmBufferSize = kInitialPcmBufferSize;
+            simpleLog("Initial PCM buffer size %d.", decoder->currentPcmBufferSize);
+        }
+
+        audioDataSize = frame->nb_samples * decoder->audioCodecContext->channels * sampleSize;
+        if (decoder->currentPcmBufferSize < audioDataSize) {
+            targetSize = roundUp(audioDataSize, 4);
+            simpleLog("Current PCM buffer size %d not sufficient for data size %d, round up to target %d.",
+                decoder->currentPcmBufferSize,
+                audioDataSize,
+                targetSize);
+            decoder->currentPcmBufferSize = targetSize;
+            av_free(decoder->pcmBuffer);
+            decoder->pcmBuffer = (unsigned char*)av_mallocz(decoder->currentPcmBufferSize);
+        }
+
+        // Interleave channel samples: L0 R0 L1 R1 ...
+        // NOTE(review): indexing frame->data[ch] assumes a *planar* sample_fmt;
+        // for packed formats all channels live in data[0] — confirm the codec
+        // output format (params[4] advertises the packed equivalent to JS).
+        for (i = 0; i < frame->nb_samples; i++) {
+            for (ch = 0; ch < decoder->audioCodecContext->channels; ch++) {
+                memcpy(decoder->pcmBuffer + offset, frame->data[ch] + sampleSize * i, sampleSize);
+                offset += sampleSize;
+            }
+        }
+
+        // Presentation time in seconds, derived from the stream time base.
+        timestamp = (double)frame->pts * av_q2d(decoder->avformatContext->streams[decoder->audioStreamIdx]->time_base);
+
+        if (decoder->accurateSeek && timestamp < decoder->beginTimeOffset) {
+            //simpleLog("audio timestamp %lf < %lf", timestamp, decoder->beginTimeOffset);
+            ret = kErrorCode_Old_Frame;
+            break;
+        }
+        if (decoder->audioCallback != NULL) {
+            decoder->audioCallback(decoder->pcmBuffer, audioDataSize, timestamp);
+        }
+    } while (0);
+    return ret;
+}
+
+/* Send one demuxed packet to the matching codec and drain every frame it
+ * produces. On full success *decodedLen is set to the packet size; when the
+ * codec returns EAGAIN (needs more input) this returns kErrorCode_Success
+ * with *decodedLen still 0, which makes the caller stop splitting the packet. */
+ErrorCode decodePacket(AVPacket *pkt, int *decodedLen) {
+    int ret = 0;
+    int isVideo = 0;
+    AVCodecContext *codecContext = NULL;
+
+    if (pkt == NULL || decodedLen == NULL) {
+        simpleLog("decodePacket invalid param.");
+        return kErrorCode_Invalid_Param;
+    }
+
+    *decodedLen = 0;
+
+    // Route the packet to the video or audio codec by stream index.
+    if (pkt->stream_index == decoder->videoStreamIdx) {
+        codecContext = decoder->videoCodecContext;
+        isVideo = 1;
+    } else if (pkt->stream_index == decoder->audioStreamIdx) {
+        codecContext = decoder->audioCodecContext;
+        isVideo = 0;
+    } else {
+        return kErrorCode_Invalid_Data;
+    }
+
+    ret = avcodec_send_packet(codecContext, pkt);
+    if (ret < 0) {
+        simpleLog("Error sending a packet for decoding %d.", ret);
+        return kErrorCode_FFmpeg_Error;
+    }
+
+    // One packet may yield zero or more frames; drain until EAGAIN/EOF.
+    while (ret >= 0) {
+        ret = avcodec_receive_frame(codecContext, decoder->avFrame);
+        if (ret == AVERROR(EAGAIN)) {
+            return kErrorCode_Success;
+        } else if (ret == AVERROR_EOF) {
+            return kErrorCode_Eof;
+        } else if (ret < 0) {
+            simpleLog("Error during decoding %d.", ret);
+            return kErrorCode_FFmpeg_Error;
+        } else {
+            int r = isVideo ? processDecodedVideoFrame(decoder->avFrame) : processDecodedAudioFrame(decoder->avFrame);
+            if (r == kErrorCode_Old_Frame) {
+                return r;
+            }
+        }
+    }
+
+    *decodedLen = pkt->size;
+    return kErrorCode_Success;
+}
+
+int readFromFile(uint8_t *data, int len) {
+    //simpleLog("readFromFile %d.", len);
+    int32_t ret         = -1;
+    int availableBytes  = 0;
+    int canReadLen      = 0;
+    do {
+        if (decoder->fp == NULL) {
+            break;
+        }
+
+        availableBytes = decoder->fileWritePos - decoder->fileReadPos;
+        if (availableBytes <= 0) {
+            break;
+        }
+
+        fseek(decoder->fp, decoder->fileReadPos, SEEK_SET);
+        canReadLen = MIN(availableBytes, len);
+        fread(data, canReadLen, 1, decoder->fp);
+        decoder->fileReadPos += canReadLen;
+        ret = canReadLen;
+    } while (0);
+    //simpleLog("readFromFile ret %d.", ret);
+    return ret;
+}
+
+int readFromFifo(uint8_t *data, int len) {
+    //simpleLog("readFromFifo %d.", len);
+    int32_t ret         = -1;
+    int availableBytes  = 0;
+    int canReadLen      = 0;
+    do {
+        if (decoder->fifo == NULL) {
+            break;
+        }	
+
+        availableBytes = av_fifo_size(decoder->fifo);
+        if (availableBytes <= 0) {
+            break;
+        }
+
+        canReadLen = MIN(availableBytes, len);
+        av_fifo_generic_read(decoder->fifo, data, canReadLen, NULL);
+        ret = canReadLen;
+    } while (0);
+    //simpleLog("readFromFifo ret %d, left %d.", ret, av_fifo_size(decoder->fifo));
+    return ret;
+}
+
+int readCallback(void *opaque, uint8_t *data, int len) {
+    //simpleLog("readCallback %d.", len);
+    int32_t ret         = -1;
+    do {
+        if (decoder == NULL) {
+            break;
+        }
+
+        if (data == NULL || len <= 0) {
+            break;
+        }		
+
+        ret = decoder->isStream ? readFromFifo(data, len) : readFromFile(data, len);
+    } while (0);
+    //simpleLog("readCallback ret %d.", ret);
+    return ret;
+}
+
+/* Custom AVIO seek callback (file mode only; streams are not seekable).
+ * Handles AVSEEK_SIZE, seeks the temp file, and — when the target lies
+ * outside the downloaded window — resets the cursors and asks JavaScript
+ * (via requestCallback) to download from the new offset, returning -1 so
+ * ffmpeg does not read immediately. */
+int64_t seekCallback(void *opaque, int64_t offset, int whence) {
+    int64_t ret         = -1;
+    int64_t pos         = -1;
+    int64_t req_pos     = -1;
+    //simpleLog("seekCallback %lld %d.", offset, whence);
+    do {
+        if (decoder == NULL || decoder->isStream || decoder->fp == NULL) {
+            break;
+        }
+
+        // ffmpeg probes the total size with this pseudo-whence.
+        if (whence == AVSEEK_SIZE) {
+            ret = decoder->fileSize;
+            break;
+        }
+
+        if (whence != SEEK_END && whence != SEEK_SET && whence != SEEK_CUR) {
+            break;
+        }
+
+        ret = fseek(decoder->fp, (long)offset, whence);
+        if (ret == -1) {
+            break;
+        }
+
+        pos = (int64_t)ftell(decoder->fp);
+        // Target outside the downloaded window: restart download from `pos`.
+        // NOTE(review): requestCallback fires here AND again after the loop
+        // (with the same req_pos) — looks like a double notification; confirm
+        // the JS side tolerates it.
+        if (pos < decoder->lastRequestOffset || pos > decoder->fileWritePos) {
+            decoder->lastRequestOffset  = pos;
+            decoder->fileReadPos        = pos;
+            decoder->fileWritePos       = pos;
+            req_pos                     = pos;
+            ret                         = -1;  // Forcing not to call read at once.
+            decoder->requestCallback(pos, getAailableDataSize());
+            simpleLog("Will request %lld and return %lld.", pos, ret);
+            break;
+        }
+
+        decoder->fileReadPos = pos;
+        ret = pos;
+    } while (0);
+    //simpleLog("seekCallback return %lld.", ret);
+
+    if (decoder != NULL && decoder->requestCallback != NULL) {
+        decoder->requestCallback(req_pos, getAailableDataSize());
+    }
+    return ret;
+}
+
+int writeToFile(unsigned char *buff, int size) {
+    int ret = 0;
+    int64_t leftBytes = 0;
+    int canWriteBytes = 0;
+    do {
+        if (decoder->fp == NULL) {
+            ret = -1;
+            break;
+        }
+
+        leftBytes = decoder->fileSize - decoder->fileWritePos;
+        if (leftBytes <= 0) {
+            break;
+        }
+
+        canWriteBytes = MIN(leftBytes, size);
+        fseek(decoder->fp, decoder->fileWritePos, SEEK_SET);
+        fwrite(buff, canWriteBytes, 1, decoder->fp);
+        decoder->fileWritePos += canWriteBytes;
+        ret = canWriteBytes;
+    } while (0);
+    return ret;
+}
+
+/* Stream-mode ingest: push received bytes into the fifo, doubling the fifo's
+ * nominal size until there is room (growth is unbounded; only a warning is
+ * logged past kMaxFifoSize). Returns av_fifo_generic_write's result, or -1
+ * when no fifo exists. */
+int writeToFifo(unsigned char *buff, int size) {
+    int ret = 0;
+    do {
+        if (decoder->fifo == NULL) {
+            ret = -1;
+            break;
+        }
+
+        int64_t leftSpace = av_fifo_space(decoder->fifo);
+        if (leftSpace < size) {
+            // Double fifoSize per iteration; growSize accumulates the total delta.
+            int growSize = 0;
+            do {
+                leftSpace += decoder->fifoSize;
+                growSize += decoder->fifoSize;
+                decoder->fifoSize += decoder->fifoSize;
+            } while (leftSpace < size);
+            av_fifo_grow(decoder->fifo, growSize);
+
+            simpleLog("Fifo size growed to %d.", decoder->fifoSize);
+            if (decoder->fifoSize >= kMaxFifoSize) {
+                simpleLog("[Warn] Fifo size larger than %d.", kMaxFifoSize);
+            }
+        }
+
+        //simpleLog("Wrote %d bytes to fifo, total %d.", size, av_fifo_size(decoder->fifo));
+        ret = av_fifo_generic_write(decoder->fifo, buff, size, NULL);
+    } while (0);
+    return ret;
+}
+
+int getAailableDataSize() {
+    int ret = 0;
+    do {
+        if (decoder == NULL) {
+            break;
+        }
+
+        if (decoder->isStream) {
+            ret = decoder->fifo == NULL ? 0 : av_fifo_size(decoder->fifo);
+        } else {
+            ret = decoder->fileWritePos - decoder->fileReadPos;
+        }
+    } while (0);
+    return ret;
+}
+
+//////////////////////////////////Export methods////////////////////////////////////////
+//////////////////////////////////Export methods////////////////////////////////////////
+/* Allocate the global decoder. fileSize >= 0 selects file mode (a uniquely
+ * named temp file caches the download); a negative fileSize selects stream
+ * mode backed by an AVFifoBuffer. logLv sets the module log level. Calling
+ * again while a decoder exists is a no-op (but still returns success). */
+ErrorCode initDecoder(int fileSize, int logLv) {
+    ErrorCode ret = kErrorCode_Success;
+    do {
+        //Log level.
+        logLevel = logLv;
+
+        if (decoder != NULL) {
+            break;
+        }
+
+        decoder = (WebDecoder *)av_mallocz(sizeof(WebDecoder));
+        if (fileSize >= 0) {
+            decoder->fileSize = fileSize;
+            // Tick count makes the temp file name unique per session.
+            sprintf(decoder->fileName, "tmp-%lu.mp4", getTickCount());
+            decoder->fp = fopen(decoder->fileName, "wb+");
+            if (decoder->fp == NULL) {
+                simpleLog("Open file %s failed, err: %d.", decoder->fileName, errno);
+                ret = kErrorCode_Open_File_Error;
+                av_free(decoder);
+                decoder = NULL;
+            }
+        } else {
+            decoder->isStream = 1;
+            decoder->fifoSize = kDefaultFifoSize;
+            decoder->fifo = av_fifo_alloc(decoder->fifoSize);
+        }
+    } while (0);
+    simpleLog("Decoder initialized %d.", ret);
+    return ret;
+}
+
+/* Release the global decoder: close and delete the temp file (file mode),
+ * free the fifo (stream mode), free the struct, and detach the ffmpeg log
+ * callback. Safe to call when no decoder exists. */
+ErrorCode uninitDecoder() {
+    if (decoder != NULL) {
+        if (decoder->fp != NULL) {
+            fclose(decoder->fp);
+            decoder->fp = NULL;
+            remove(decoder->fileName);
+        }
+
+        if (decoder->fifo != NULL) {
+             av_fifo_freep(&decoder->fifo);
+        }
+
+        av_freep(&decoder);
+    }
+
+    av_log_set_callback(NULL);
+
+    simpleLog("Decoder uninitialized.");
+    return kErrorCode_Success;
+}
+
+ErrorCode openDecoder(int *paramArray, int paramCount, long videoCallback, long audioCallback, long requestCallback) {
+    ErrorCode ret = kErrorCode_Success;
+    int r = 0;
+    int i = 0;
+    int params[7] = { 0 };
+    do {
+        simpleLog("打开编码器.");
+
+        av_register_all();
+        avcodec_register_all();
+
+        if (logLevel == kLogLevel_All) {
+            av_log_set_callback(ffmpegLogCallback);
+        }
+        
+        decoder->avformatContext = avformat_alloc_context();
+        decoder->customIoBuffer = (unsigned char*)av_mallocz(kCustomIoBufferSize);
+
+        AVIOContext* ioContext = avio_alloc_context(
+            decoder->customIoBuffer,
+            kCustomIoBufferSize,
+            0,
+            NULL,
+            readCallback,
+            NULL,
+            seekCallback);
+        if (ioContext == NULL) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("avio_alloc_context failed.");
+            break;
+        }
+
+        decoder->avformatContext->pb = ioContext;
+        decoder->avformatContext->flags = AVFMT_FLAG_CUSTOM_IO;
+		simpleLog("avformat_open_input.");
+
+        r = avformat_open_input(&decoder->avformatContext, NULL, NULL, NULL);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            char err_info[32] = { 0 };
+            av_strerror(ret, err_info, 32);
+            simpleLog("avformat_open_input failed %d %s.", ret, err_info);
+            break;
+        }
+        
+        simpleLog("avformat_find_stream_info");
+
+        r = avformat_find_stream_info(decoder->avformatContext, NULL);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("av_find_stream_info failed %d.", ret);
+            break;
+        }
+			
+			
+	
+        simpleLog("avformat_find_stream_info 成功.");
+
+        for (i = 0; i < decoder->avformatContext->nb_streams; i++) {
+            decoder->avformatContext->streams[i]->discard = AVDISCARD_DEFAULT;
+        }
+
+        r = openCodecContext(
+            decoder->avformatContext,
+            AVMEDIA_TYPE_VIDEO,
+            &decoder->videoStreamIdx,
+            &decoder->videoCodecContext);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("Open video codec context failed %d.", ret);
+            break;
+        }
+
+        simpleLog("Open video codec context success, video stream index %d %x.",
+            decoder->videoStreamIdx, (unsigned int)decoder->videoCodecContext);
+
+        simpleLog("Video stream index:%d pix_fmt:%d resolution:%d*%d.",
+            decoder->videoStreamIdx,
+            decoder->videoCodecContext->pix_fmt,
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height);
+
+        r = openCodecContext(
+            decoder->avformatContext,
+            AVMEDIA_TYPE_AUDIO,
+            &decoder->audioStreamIdx,
+            &decoder->audioCodecContext);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("Open audio codec context failed %d.", ret);
+            break;
+        }
+
+        simpleLog("Open audio codec context success, audio stream index %d %x.",
+            decoder->audioStreamIdx, (unsigned int)decoder->audioCodecContext);
+
+        simpleLog("Audio stream index:%d sample_fmt:%d channel:%d, sample rate:%d.",
+            decoder->audioStreamIdx,
+            decoder->audioCodecContext->sample_fmt,
+            decoder->audioCodecContext->channels,
+            decoder->audioCodecContext->sample_rate);
+
+        av_seek_frame(decoder->avformatContext, -1, 0, AVSEEK_FLAG_BACKWARD);
+
+        /* For RGB Renderer(2D WebGL).
+        decoder->swsCtx = sws_getContext(
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height,
+            decoder->videoCodecContext->pix_fmt, 
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height,
+            AV_PIX_FMT_RGB32,
+            SWS_BILINEAR, 
+            0, 
+            0, 
+            0);
+        if (decoder->swsCtx == NULL) {
+            simpleLog("sws_getContext failed.");
+            ret = kErrorCode_FFmpeg_Error;
+            break;
+        }
+        */
+        
+        decoder->videoSize = avpicture_get_size(
+            decoder->videoCodecContext->pix_fmt,
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height);
+
+        decoder->videoBufferSize = 3 * decoder->videoSize;
+        decoder->yuvBuffer = (unsigned char *)av_mallocz(decoder->videoBufferSize);
+        decoder->avFrame = av_frame_alloc();
+        
+        params[0] = 1000 * (decoder->avformatContext->duration + 5000) / AV_TIME_BASE;
+        params[1] = decoder->videoCodecContext->pix_fmt;
+        params[2] = decoder->videoCodecContext->width;
+        params[3] = decoder->videoCodecContext->height;
+        params[4] = decoder->audioCodecContext->sample_fmt;
+        params[5] = decoder->audioCodecContext->channels;
+        params[6] = decoder->audioCodecContext->sample_rate;
+
+        enum AVSampleFormat sampleFmt = decoder->audioCodecContext->sample_fmt;
+        if (av_sample_fmt_is_planar(sampleFmt)) {
+            const char *packed = av_get_sample_fmt_name(sampleFmt);
+            params[4] = av_get_packed_sample_fmt(sampleFmt);
+        }
+
+        if (paramArray != NULL && paramCount > 0) {
+            for (int i = 0; i < paramCount; ++i) {
+                paramArray[i] = params[i];
+            }
+        }
+
+        decoder->videoCallback = (VideoCallback)videoCallback;
+        decoder->audioCallback = (AudioCallback)audioCallback;
+        decoder->requestCallback = (RequestCallback)requestCallback;
+
+        simpleLog("Decoder opened, duration %ds, picture size %d.", params[0], decoder->videoSize);
+    } while (0);
+
+    if (ret != kErrorCode_Success && decoder != NULL) {
+        av_freep(&decoder);
+    }
+    return ret;
+}
+
+/* Tear down everything openDecoder built: codec contexts, the custom AVIO
+ * context and its buffer, the demuxer, and the YUV/PCM/frame buffers.
+ * No-op when the decoder or its format context is absent. */
+ErrorCode closeDecoder() {
+    ErrorCode ret = kErrorCode_Success;
+    do {
+        if (decoder == NULL || decoder->avformatContext == NULL) {
+            break;
+        }
+
+        if (decoder->videoCodecContext != NULL) {
+            closeCodecContext(decoder->avformatContext, decoder->videoCodecContext, decoder->videoStreamIdx);
+            decoder->videoCodecContext = NULL;
+            simpleLog("Video codec context closed.");
+        }
+
+        if (decoder->audioCodecContext != NULL) {
+            closeCodecContext(decoder->avformatContext, decoder->audioCodecContext, decoder->audioStreamIdx);
+            decoder->audioCodecContext = NULL;
+            simpleLog("Audio codec context closed.");
+        }
+
+        // The AVIO buffer may have been reallocated by ffmpeg, so it is freed
+        // via pb->buffer rather than our saved customIoBuffer pointer.
+        AVIOContext *pb = decoder->avformatContext->pb;
+        if (pb != NULL) {
+            if (pb->buffer != NULL) {
+                av_freep(&pb->buffer);
+                decoder->customIoBuffer = NULL;
+            }
+            av_freep(&decoder->avformatContext->pb);
+            simpleLog("IO context released.");
+        }
+
+        avformat_close_input(&decoder->avformatContext);
+        decoder->avformatContext = NULL;
+        simpleLog("Input closed.");
+
+        if (decoder->yuvBuffer != NULL) {
+            av_freep(&decoder->yuvBuffer);
+        }
+
+        if (decoder->pcmBuffer != NULL) {
+            av_freep(&decoder->pcmBuffer);
+        }
+
+        if (decoder->avFrame != NULL) {
+            av_freep(&decoder->avFrame);
+        }
+        simpleLog("All buffer released.");
+    } while (0);
+    return ret;
+}
+
+int sendData(unsigned char *buff, int size) {
+    int ret = 0;
+    int64_t leftBytes = 0;
+    int canWriteBytes = 0;
+    do {
+        if (decoder == NULL) {
+            ret = -1;
+            break;
+        }
+
+        if (buff == NULL || size == 0) {
+            ret = -2;
+            break;
+        }
+
+        ret = decoder->isStream ? writeToFifo(buff, size) : writeToFile(buff, size);
+    } while (0);
+    return ret;
+}
+
+/* Read one packet from the demuxer and decode it, splitting the packet if
+ * the codec consumes it partially. Returns kErrorCode_Invalid_State when no
+ * data is buffered, kErrorCode_Eof at end of stream. Driven repeatedly from
+ * the JavaScript side. */
+ErrorCode decodeOnePacket() {
+    ErrorCode ret	= kErrorCode_Success;
+    int decodedLen	= 0;
+    int r			= 0;
+
+    AVPacket packet;
+    av_init_packet(&packet);
+    do {
+        if (decoder == NULL) {
+            ret = kErrorCode_Invalid_State;
+            break;
+        }
+
+        if (getAailableDataSize() <= 0) {
+            ret = kErrorCode_Invalid_State;
+            break;
+        }
+
+        packet.data = NULL;
+        packet.size = 0;
+
+        r = av_read_frame(decoder->avformatContext, &packet);
+        if (r == AVERROR_EOF) {
+            ret = kErrorCode_Eof;
+            break;
+        }
+
+        if (r < 0 || packet.size == 0) {
+            break;
+        }
+
+        // Consume the packet in chunks; decodePacket reports how much it ate
+        // (0 on EAGAIN, which exits this loop).
+        do {
+            ret = decodePacket(&packet, &decodedLen);
+            if (ret != kErrorCode_Success) {
+                break;
+            }
+
+            if (decodedLen <= 0) {
+                break;
+            }
+
+            packet.data += decodedLen;
+            packet.size -= decodedLen;
+        } while (packet.size > 0);
+    } while (0);
+    av_packet_unref(&packet);
+    return ret;
+}
+
+/* Seek to `ms` milliseconds. With accurateSeek, decoded frames earlier than
+ * the target are dropped by the process* functions until the target is
+ * reached. Flushes both codecs and reads one packet to kick the AVIO seek
+ * callback (and thus the JS-side data request). */
+ErrorCode seekTo(int ms, int accurateSeek) {
+    int ret = 0;
+    // ms -> AV_TIME_BASE (microseconds) for the stream-agnostic (-1) seek.
+    int64_t pts = (int64_t)ms * 1000;
+    // NOTE(review): no NULL check on the global `decoder` here, unlike the
+    // other exported entry points — confirm JS never calls this before open.
+    decoder->accurateSeek = accurateSeek;
+    ret = avformat_seek_file(decoder->avformatContext,
+                                 -1,
+                                 INT64_MIN,
+                                 pts,
+                                 pts,
+                                 AVSEEK_FLAG_BACKWARD);
+    simpleLog("Native seek to %d return %d %d.", ms, ret, decoder->accurateSeek);
+    if (ret == -1) {
+        return kErrorCode_FFmpeg_Error;
+    } else {
+        avcodec_flush_buffers(decoder->videoCodecContext);
+        avcodec_flush_buffers(decoder->audioCodecContext);
+
+        // Trigger seek callback
+        // NOTE(review): this probe packet is never av_packet_unref'd — its
+        // payload leaks on every seek; verify and free it if confirmed.
+        AVPacket packet;
+        av_init_packet(&packet);
+        av_read_frame(decoder->avformatContext, &packet);
+
+        decoder->beginTimeOffset = (double)ms / 1000;
+        return kErrorCode_Success;
+    }
+}
+
+/* Entry point required by Emscripten; all real work is driven through the
+ * exported init/open/send/decode/seek functions. */
+int main() {
+    //simpleLog("Native loaded.");
+    return 0;
+}
+
+#ifdef __cplusplus
+}
+#endif

+ 175 - 0
static/webRtcYJ/decoder.js

@@ -0,0 +1,175 @@
// Worker-global decoding state.
var isFinish = false;          // wasm runtime ready flag
var isSwitchSharpness = false;
var h264Queue = [];            // pending H.264 access units
var ret;

var maxWidth = 1080;           // largest frame we pre-allocate for
var maxHeight = 1920;
var globalYuvPtr = undefined;  // wasm-heap output buffer, allocated lazily
var golbalYuvData;             // JS-side YUV copy; allocated exactly once
var renderCount = 0;
var curFrameWidth = undefined;
var curFrameHeight = undefined;

// Allocate the reusable JS-side YUV buffer, sized for the largest frame.
function doSomeInit() {
  golbalYuvData = new Uint8Array(maxWidth * maxHeight * 3);
}
// Pull the wasm decoder glue and the SPS parser into the worker scope.
self.importScripts('ffmpeghelper.js');
self.importScripts('spsParser.js');

// Once the wasm runtime is ready, allocate buffers and open the decoder.
self.Module.onRuntimeInitialized = function () {
  isFinish = true;
  doSomeInit();
  ret = Module._openDecoder(); // 0 means success

  if (!ret) {
    console.log('打开编码器成功');
  }
};
+
// Receive raw stream bytes from the main thread. AAC/ADTS audio frames start
// with 0xff and are ignored here; everything else is treated as an H.264 NAL
// unit (with a 4-byte start code) and queued for the wasm decoder.
self.addEventListener(
  'message',
  function (e) {
    var msg = e.data;
    if (msg.type == 'rawData') {
      var buffer = e.data.data;

      if (buffer[0] !== 0xff) {
        // video path (audio ADTS frames begin with 0xff)
        var type = buffer[4] & 0x1f; // NAL unit type, after the start code
        if (type == 7) {
          // SPS: extract the coded resolution from the bitstream.
          let info = spsParser(buffer);

          if (curFrameWidth != undefined && curFrameHeight != undefined) {
            if (info.width != curFrameWidth || info.height != curFrameHeight) {
              // Resolution changed: restart the decoder before queuing more.
              console.log('🚀 ~ file: decoder.js ~ line 44 ~ 分辨率发生改变');
              switchNewStream();
            }
          }

          curFrameWidth = info.width;
          curFrameHeight = info.height;
          h264Queue.push(buffer);
        } else {
          h264Queue.push(buffer);
        }
      }
    }
  },
  false,
);
+
// Forward a log line (cmd 0) to the main thread, stamped with the current time.
function PrintfLog(str) {
  self.postMessage({
    cmd: 0,
    data: str,
    time: Date.now(),
  });
}
+
// Ask the main thread (cmd 5) to request a fresh key frame from the sender.
function doRequestIFrame() {
  self.postMessage({ cmd: 5 });
}
+
// Drain one queued H.264 unit per tick, once the wasm decoder is ready.
function decodeVideo() {
  if (h264Queue.length > 0 && isFinish) {
    decodeH264(h264Queue.shift());
  }
}

// Pump the queue roughly every millisecond; cleared again in closeDecoder().
var timeFlag = setInterval(decodeVideo, 1);
+
/**
 * Copy a decoded YUV frame out of the wasm heap and post it (cmd 1) to the
 * main thread for WebGL rendering.
 * yuvData     pre-allocated Uint8Array receiving the pixels
 * inputYuvPtr byte offset of the frame inside Module.HEAPU8
 * videoWidth / videoHeight  frame dimensions in pixels
 * copyLen     number of bytes to copy (w*h*3/2 for YUV420)
 */
function dispatchYuvData(
  yuvData,
  inputYuvPtr,
  videoWidth,
  videoHeight,
  copyLen,
) {
  // fix: the loop counter was an implicit global; declare it locally
  for (let i = 0; i < copyLen; i++) {
    yuvData[i] = Module.HEAPU8[inputYuvPtr + i];
  }

  self.postMessage({
    cmd: 1,
    data: yuvData,
    time: new Date().getTime(),
    width: videoWidth,
    height: videoHeight,
  });
}
+
// Decode one H.264 access unit through the wasm decoder and, when a frame
// comes out, hand the YUV pixels to the renderer.
function decodeH264(data) {
  var frameWidth = 0;
  var frameHeight = 0;
  var inputPtr = Module._malloc(data.length); // input buffer on the wasm heap

  for (var i = 0; i < data.length; i++) {
    Module.HEAPU8[inputPtr + i] = data[i]; // copy the unit into heap memory
  }

  // Output buffer sized for the largest expected YUV420 frame; allocated once.
  var allocSize = (maxWidth * maxHeight * 3) / 2;
  if (globalYuvPtr == undefined) {
    globalYuvPtr = Module._malloc(allocSize);
  }

  var ret = Module._feedData(inputPtr, data.length, globalYuvPtr);

  if (ret >= 0) {
    // Only consider rendering after a successful decode.
    frameWidth = Module._getVideoWidth(); // dimensions reported by the decoder
    frameHeight = Module._getVideoHeight();
    var copyLen = (frameWidth * frameHeight * 3) / 2; // copy only what is needed

    if (renderCount > 1) {
      // Skip the first frame: it renders as a solid green picture.
      dispatchYuvData(
        golbalYuvData,
        globalYuvPtr,
        frameWidth,
        frameHeight,
        copyLen,
      );
    } else {
      renderCount++;
    }
  }

  Module._free(inputPtr);
}
+
// Restart the decoder when the stream resolution changes.
function switchNewStream() {
  closeDecoder();
  var ret = Module._openDecoder(); // reopen the wasm decoder
  // fix: assign the new interval to the shared `timeFlag`; it used to be a
  // local `var`, so closeDecoder() could never clear the new pump and every
  // resolution switch leaked an extra 1 ms interval
  timeFlag = setInterval(decodeVideo, 1);
  console.log('切换解码器成功');
}
+
// Stop the decode pump, tear down the wasm decoder and drop buffered input.
function closeDecoder() {
  clearInterval(timeFlag); // stop the timer that feeds the decoder
  Module._closeDecoder();
  renderCount = 0;

  if (globalYuvPtr != undefined) {
    Module._free(globalYuvPtr);
    globalYuvPtr = undefined;
  }
  console.log('此时buffer长度: %d', h264Queue.length);
  // fix: the drain loop tested the misspelled `h264Queue.lengh` (always
  // undefined), so the queue was never actually emptied
  h264Queue.length = 0;
}

+ 136 - 0
static/webRtcYJ/ffmpegTest.c

@@ -0,0 +1,136 @@
+#include <stdio.h>
+#include <unistd.h>
+#include "libavformat/avformat.h"
+#include "libswscale/swscale.h"
+#include "libavutil/imgutils.h"
+//#include "libswresample/swresample.h"
+#include "libavcodec/avcodec.h"
+#include "libavutil/time.h"
+
/* Decoder-wide state, owned by openDecoder()/closeDecoder(). */
int rgbSize = 0;                 /* bytes in one YUV420 frame (w*h*3/2) */
typedef unsigned char byte;
uint8_t *out_buffer = NULL;      /* backing store for yuvFrame's planes */
AVCodec *videoCodec = NULL;
AVFrame *frame = NULL;           /* raw decoder output */
AVFrame *yuvFrame = NULL;        /* converted YUV420P output frame */
AVCodecContext *videoCodecCtx = NULL;
struct SwsContext*	m_img_convert_ctx = NULL; /* pixel-format converter */

/* Placeholder for an FFmpeg log callback; intentionally empty. */
void ffmpegLog()
{
	
}
+
+//打开解码器
+int openDecoder(int width, int height)
+{
+	int ret;
+	int errorCode = 0;
+	uint8_t *out_buffer = NULL;
+	
+	videoCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
+	videoCodecCtx = avcodec_alloc_context3(videoCodec);
+
+	frame = av_frame_alloc();
+	yuvFrame = av_frame_alloc();
+
+	if ((errorCode = avcodec_open2(videoCodecCtx, videoCodec, NULL)) < 0)
+	{
+		return errorCode;
+	}
+
+	videoCodecCtx->width = width;
+	videoCodecCtx->height = height;
+	videoCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+
+	rgbSize = videoCodecCtx->width * videoCodecCtx->height * 3 / 2;
+	out_buffer = (unsigned char *)av_malloc(rgbSize);
+	av_image_fill_arrays(yuvFrame->data, yuvFrame->linesize, out_buffer, AV_PIX_FMT_YUV420P, videoCodecCtx->width, videoCodecCtx->height, 1);
+
+	m_img_convert_ctx = sws_getContext(videoCodecCtx->width, videoCodecCtx->height,
+			videoCodecCtx->pix_fmt, videoCodecCtx->width, videoCodecCtx->height,
+			AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
+			
+	printf("c 端已经打开编码器, 哈哈\n");
+	return 0;
+}
+
+void PrintArry(byte*data, size_t size)
+{
+	printf("打印数组:");
+	for(size_t i = 0;i < size;i++)
+	{
+	  printf("%02X", data[i]);
+	}	
+	printf("\n");
+}
+
+int feedData(byte*data, size_t size, byte* outBuffer)
+{
+	int ret = -1;
+	int len = videoCodecCtx->width*videoCodecCtx->height;
+	AVPacket *videoPacket = av_packet_alloc();	
+	av_new_packet(videoPacket, size);
+	//printf("准备拷贝\n");
+	memcpy(videoPacket->data, data, size);
+	//printf("准备解码\n");
+	
+	uint64_t start = av_gettime();
+	avcodec_send_packet(videoCodecCtx, videoPacket);
+	ret = avcodec_receive_frame(videoCodecCtx, frame);
+	//PrintArry(data, size);
+	uint64_t cost = av_gettime() - start/1000;
+	printf("解码耗时 %d ms", cost);
+	
+	if(ret == 0)
+	{
+		ret = sws_scale(m_img_convert_ctx, (const uint8_t* const*)frame->data, frame->linesize, 0, videoCodecCtx->height, yuvFrame->data, yuvFrame->linesize);		
+		byte *p= outBuffer;
+		memcpy(p, yuvFrame->data[0], len);	
+		p = p + len;
+		memcpy(p, yuvFrame->data[1], len / 4);
+		p = p + len / 4;
+		memcpy(p, yuvFrame->data[2], len / 4);		
+	}
+	
+	if(ret < 0)
+	{
+		char temp[4096];
+		av_strerror(ret, temp, 4096);
+	//	printf("错误打印 %s \n", temp);
+	}
+	
+	av_packet_free(&videoPacket);
+	return ret;
+}
+
+//关闭解码器
+void closeDecoder()
+{
+	if(out_buffer)
+	{
+		free(out_buffer);
+	}
+	
+	av_frame_unref(frame);
+	av_frame_unref(yuvFrame);
+	av_frame_free(&frame);
+	av_frame_free(&yuvFrame);
+	
+	if (m_img_convert_ctx)
+	{
+		sws_freeContext(m_img_convert_ctx);
+	}
+	
+	if (videoCodecCtx)
+	{
+		avcodec_free_context(&videoCodecCtx);
+	}
+}
+
+
/* Standalone test entry point; decoding is driven through the exported
 * functions, so main() intentionally does nothing. */
int main(int argc, char**argv)
{
	(void)argc;
	(void)argv;
	return 0;
}

File diff suppressed because it is too large
+ 1 - 0
static/webRtcYJ/ffmpeghelper.js


BIN
static/webRtcYJ/ffmpeghelper.wasm


+ 274 - 0
static/webRtcYJ/helper.js

@@ -0,0 +1,274 @@
+//此文件实现将控制命令封装成协议,具体协议内容请看:
+//链接:http://note.youdao.com/noteshare?id=dabda6c613adef7a416bd2625cd770a1
+
// Compute the protocol's BCC (block check character): the XOR of every
// byte in `arry`. Used as the checksum byte of a control frame.
function calBcc(arry) {
  var checksum = 0;
  for (var idx = 0, n = arry.length; idx < n; idx++) {
    checksum ^= arry[idx];
  }
  return checksum;
}
+
// Render a byte array as an upper-case hex string (debugging aid).
function PrintArry(data) {
  var parts = [];
  for (var i = 0; i < data.length; i++) {
    parts.push(data[i].toString(16).padStart(2, '0'));
  }
  return parts.join('').toUpperCase();
}
+
// Build a control-protocol frame:
//   0x68 | 4-byte big-endian payload length | 0x00 (sender=client) |
//   SN (ASCII) | dataType | payload (ASCII JSON) | BCC | 0x16
// sn: board serial number (string); dataType: payload type id;
// jsonCmd: JSON command string. Returns a Uint8Array ready to send over
// the websocket.
function makeFrame(sn, dataType, jsonCmd) {
  var dataLen = jsonCmd.length;
  var frameLen = dataLen + 26; // head(6) + sn(17) + type(1) + payload + bcc + tail
  var outPut = new Uint8Array(frameLen);
  var index = 0;

  outPut[index++] = 0x68;
  outPut[index++] = (dataLen & 0xff000000) >> 24;
  outPut[index++] = (dataLen & 0x00ff0000) >> 16;
  outPut[index++] = (dataLen & 0x0000ff00) >> 8;
  outPut[index++] = dataLen & 0x000000ff;
  outPut[index++] = 0; // sender type: client

  // Serial number, one ASCII byte per character.
  for (var i = 0; i < sn.length; i++) {
    outPut[index++] = sn.charCodeAt(i);
  }

  outPut[index++] = dataType;
  // JSON payload, one ASCII byte per character.
  for (var j = 0; j < jsonCmd.length; j++) {
    outPut[index++] = jsonCmd.charCodeAt(j);
  }

  // Checksum covers everything between the 0x68 head and the BCC itself.
  outPut[index++] = calBcc(outPut.slice(1, frameLen - 2));
  outPut[index++] = 0x16;
  return outPut;
}
+
// Build a key-press frame; `code` is the Android key code (as a string).
// Wire shape: {"data":{"keyCode":"25"},"event":"keyCode"}
function ExexuteKeyDown(code) {
  var sn = 'RK3923C1201900139';
  var payload = JSON.stringify({ data: { keyCode: code }, event: 'keyCode' });
  return makeFrame(sn, 0, payload);
}
// Build a touch-down frame at (x, y); action 0 / event '0' = press.
function ExexuteMouseDown(x, y) {
  var sn = 'RK3923C1201900139';
  var payload = JSON.stringify({
    data: { action: 0, count: 1, pointerId: 0, x: x, y: y },
    event: '0',
  });
  return makeFrame(sn, 0, payload);
}
// Build a touch-move frame at (x, y); action 2 / event '2' = move.
function ExexuteMouseMove(x, y) {
  var sn = 'RK3923C1201900139';
  var payload = JSON.stringify({
    data: { action: 2, count: 1, pointerId: 0, x: x, y: y },
    event: '2',
  });
  return makeFrame(sn, 0, payload);
}
// Build a touch-up frame at (x, y); action 1 / event '1' = release.
function ExexuteMouseUp(x, y) {
  var sn = 'RK3923C1201900139';
  var payload = JSON.stringify({
    data: { action: 1, count: 1, pointerId: 0, x: x, y: y },
    event: '1',
  });
  return makeFrame(sn, 0, payload);
}
// Build the bare JSON string for a virtual key press (no frame wrapping).
// Known codes: 187 menu, 3 home, 4 back, 24 volume up, 25 volume down.
function ExexuteKeyBoard(keycode) {
  return JSON.stringify({ data: { keyCode: String(keycode) }, type: 'keyCode' });
}
+
// Same frame layout as makeFrame(), but the payload is a raw byte sequence
// instead of an ASCII JSON string.
function makeFrameExtend(sn, dataType, body) {
  var dataLen = body.length;
  var frameLen = dataLen + 26;
  var outPut = new Uint8Array(frameLen);
  var index = 0;

  outPut[index++] = 0x68;
  outPut[index++] = (dataLen & 0xff000000) >> 24;
  outPut[index++] = (dataLen & 0x00ff0000) >> 16;
  outPut[index++] = (dataLen & 0x0000ff00) >> 8;
  outPut[index++] = dataLen & 0x000000ff;
  outPut[index++] = 0; // sender type: client

  // Serial number, one ASCII byte per character.
  for (var i = 0; i < sn.length; i++) {
    outPut[index++] = sn.charCodeAt(i);
  }

  outPut[index++] = dataType;
  // Raw payload bytes.
  for (var j = 0; j < body.length; j++) {
    outPut[index++] = body[j];
  }

  // Checksum covers everything between the 0x68 head and the BCC itself.
  outPut[index++] = calBcc(outPut.slice(1, frameLen - 2));
  outPut[index++] = 0x16;
  return outPut;
}
+
// Parse an orientation report. After the 00 00 00 01 start code, bytes
// 01 01 01 mark an orientation message whose value is byte 7
// (0 = landscape, 1 = portrait). Returns undefined for any other payload.
function CheckScreenDirection(data) {
  if (data[0] == 0 && data[1] == 0 && data[2] == 0 && data[3] == 1) {
    if (data[4] == 1 && data[5] == 1 && data[6] == 1) {
      // fix: the result used to be stored in an implicit global named
      // `screen`, which clobbers window.screen in browsers
      return data[7];
    }
  }
}
+
var emptyCount = 0;

// A canned silent AAC (ADTS) frame used as filler audio. Returns a fresh
// Uint8Array on every call so callers may mutate their copy freely.
function GetEmptyFrame() {
  return new Uint8Array([
    0xff, 0xf1, 0x50, 0x80, 0x12, 0x5f, 0xfc, 0x21, 0x1a, 0xc8, 0x01, 0x27,
    0xfc, 0xc0, 0x00, 0x7e, 0x03, 0x10, 0x40, 0x63, 0x3d, 0x77, 0xe2, 0xb6,
    0xe3, 0x6e, 0x00, 0x37, 0x56, 0x78, 0xeb, 0x70, 0xab, 0xc5, 0x58, 0x08,
    0x59, 0x76, 0xf0, 0x47, 0x3d, 0x23, 0x6c, 0xa6, 0x2b, 0x59, 0x4e, 0x9c,
    0xe0, 0x23, 0x1c, 0x2d, 0x74, 0xcb, 0xe2, 0xfc, 0x77, 0x7d, 0x26, 0x13,
    0xc3, 0x04, 0x40, 0x02, 0x60, 0xf6, 0x03, 0x20, 0x80, 0xc7, 0x9a, 0x11,
    0x0e, 0x9b, 0xda, 0xa0, 0x84, 0x00, 0x2a, 0x95, 0x4a, 0x1e, 0x74, 0xa5,
    0x40, 0x2a, 0xca, 0xa8, 0xca, 0xf0, 0xf2, 0x1e, 0xa8, 0x77, 0x86, 0xa0,
    0x62, 0x8c, 0xb8, 0x5f, 0xa6, 0x67, 0xbf, 0x0d, 0x27, 0x8b, 0xf9, 0x58,
    0xbd, 0xe3, 0x2d, 0x0c, 0xbf, 0x48, 0x3c, 0xfd, 0x70, 0x78, 0x5e, 0xa9,
    0x0b, 0x24, 0x9c, 0x13, 0x98, 0xa4, 0xa0, 0x6e, 0xca, 0xaa, 0x7a, 0x88,
    0xa5, 0x0c, 0x2e, 0x83, 0x59, 0x02, 0x24, 0x01, 0x41, 0x03, 0x92, 0x10,
    0x40, 0x07,
  ]);
}
+
// Query the current screen orientation (dataType 5 control frame).
function GetScreenState() {
  var body = new Uint8Array([0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x02]);
  return makeFrameExtend('RK3923C1201900139', 5, body);
}
+
// Build the authentication frame: opcode 0x04 followed by the UTF-8 bytes of
// the verification code, wrapped as a dataType-6 frame.
function VerifyCode(sn, code) {
  var codeBuffer = new TextEncoder('utf-8').encode(code);
  // fix: the buffer used to be sized by code.length (UTF-16 units), silently
  // dropping trailing bytes of multi-byte UTF-8 codes; size it by the
  // encoded byte count instead (identical for pure-ASCII codes)
  var buffer = new Uint8Array(codeBuffer.length + 1);
  buffer[0] = 0x04;
  buffer.set(codeBuffer, 1);
  return makeFrameExtend(sn, 6, buffer);
}
+
// Inspect an auth-response frame. Byte 3 of the payload equal to 0x03 means
// the verification code was accepted.
function CheckVerifyCode(data) {
  var dataLen = data.length - 26;
  var body = data.slice(24, 24 + dataLen);
  console.log('打印:' + PrintArry(body));
  return body[3] == 0x03;
}
+
// Build the channel-configuration frame: opcode 0x07 plus the UTF-8 bytes of
// the channel name, sent as dataType 6.
function ConfigChannel(sn, channelName) {
  var nameBytes = new TextEncoder('utf-8').encode(channelName);
  var outPut = [0x07];
  for (var i = 0; i < nameBytes.length; i++) {
    outPut.push(nameBytes[i]);
  }
  return makeFrameExtend(sn, 6, outPut);
}
+
// Ask the board to enable file logging (dataType 7, opcode 0x01).
function OpenFileLog(sn) {
  return makeFrameExtend(sn, 7, new Uint8Array([0x01]));
}
+
// Wrap a multi-device-login report (type 3) as a dataType 0x0d JSON frame.
function makeMultiLogin(sn, jsonData) {
  var payload = new TextEncoder('utf-8').encode(
    JSON.stringify({ type: 3, data: jsonData }),
  );
  return makeFrameExtend(sn, 0x0d, payload);
}
+
// Wrap a login-statistics report (type 4) as a dataType 0x0d JSON frame.
function makeStatistics(sn, jsonData) {
  var payload = new TextEncoder('utf-8').encode(
    JSON.stringify({ type: 4, data: jsonData }),
  );
  return makeFrameExtend(sn, 0x0d, payload);
}
+
// Decode the JSON body of a multi-device-login frame (payload bytes start at
// offset 24, followed by the 2-byte BCC + tail) and return it as an object.
function checkMultiLoginInfo(input) {
  var bodyLen = input.length - 26; // JSON byte count
  var bodyBytes = input.slice(24, 24 + bodyLen);
  var jsonStr = new TextDecoder('utf-8').decode(bodyBytes);
  console.log('取得json 字符串:' + jsonStr);
  return JSON.parse(jsonStr);
}
+
// Build the definition-switch request (type 2) for the H5 cloud-phone client.
function makeSharpness(level) {
  var sn = 'RK3923C1201900139';
  var payload = new TextEncoder('utf-8').encode(
    JSON.stringify({
      type: 2,
      data: { definition: level, clientType: 'h5', sceneType: 'cloudPhone' },
    }),
  );
  return makeFrameExtend(sn, 0xd, payload);
}
+
// Ask the encoder for an immediate key frame (dataType 6, opcode 0x20).
function RequestIFrame() {
  return makeFrameExtend('RK3923C1201900139', 6, new Uint8Array([0x20]));
}

BIN
static/webRtcYJ/img/close.png


BIN
static/webRtcYJ/img/countdown.png


BIN
static/webRtcYJ/img/fenxiang_icon.png


BIN
static/webRtcYJ/img/fenxiang_icon@2x.png


BIN
static/webRtcYJ/img/goumai_icon(1).png


BIN
static/webRtcYJ/img/goumai_icon.png


BIN
static/webRtcYJ/img/goumai_icon@2x(1).png


BIN
static/webRtcYJ/img/goumai_icon@2x.png


BIN
static/webRtcYJ/img/guanbi_icon@2x.png


BIN
static/webRtcYJ/img/jia_bu_icon.png


BIN
static/webRtcYJ/img/jia_bu_icon@2x.png


BIN
static/webRtcYJ/img/jia_ke_icon.png


BIN
static/webRtcYJ/img/jia_ke_icon@2x.png


BIN
static/webRtcYJ/img/jian_bu_icon.png


BIN
static/webRtcYJ/img/jian_bu_icon@2x.png


BIN
static/webRtcYJ/img/jian_ke_icon.png


BIN
static/webRtcYJ/img/jian_ke_icon@2x.png


BIN
static/webRtcYJ/img/jianqieban_pic@2x.png


BIN
static/webRtcYJ/img/kefu_wei_icon.png


BIN
static/webRtcYJ/img/kefu_wei_icon@2x.png


BIN
static/webRtcYJ/img/kefu_xuanzhong_icon.png


BIN
static/webRtcYJ/img/kefu_xuanzhong_icon@2x.png


BIN
static/webRtcYJ/img/kefurexian_icon.png


BIN
static/webRtcYJ/img/kefurexian_icon@2x.png


BIN
static/webRtcYJ/img/kuorong_icon.png


BIN
static/webRtcYJ/img/kuorong_icon@2x.png


BIN
static/webRtcYJ/img/qq_icon.png


BIN
static/webRtcYJ/img/qq_icon@2x.png


BIN
static/webRtcYJ/img/shijian_icon.png


BIN
static/webRtcYJ/img/shijian_icon@2x.png


BIN
static/webRtcYJ/img/smallBell.png


BIN
static/webRtcYJ/img/wenzi_icon.png


BIN
static/webRtcYJ/img/wenzi_icon@2x.png


BIN
static/webRtcYJ/img/wode_wei_icon.png


BIN
static/webRtcYJ/img/wode_wei_icon@2x.png


BIN
static/webRtcYJ/img/wode_xuanzhong_icon.png


BIN
static/webRtcYJ/img/wode_xuanzhong_icon@2x.png


BIN
static/webRtcYJ/img/xiazai_icon.png


BIN
static/webRtcYJ/img/xiazai_icon@2x.png


BIN
static/webRtcYJ/img/xuankang_xuan_icon.png


BIN
static/webRtcYJ/img/xuankang_xuan_icon@2x.png


BIN
static/webRtcYJ/img/xuankuang_wei_icon.png


BIN
static/webRtcYJ/img/xuankuang_wei_icon@2x.png


BIN
static/webRtcYJ/img/yunshouji_wei_icon.png


BIN
static/webRtcYJ/img/yunshouji_wei_icon@2x.png


BIN
static/webRtcYJ/img/yunshouji_xuanzhong_icon.png


BIN
static/webRtcYJ/img/yunshouji_xuanzhong_icon@2x.png


File diff suppressed because it is too large
+ 4 - 0
static/webRtcYJ/jquery-1.11.0.min.js


File diff suppressed because it is too large
+ 13 - 0
static/webRtcYJ/jquery-weui.min.js


+ 130 - 0
static/webRtcYJ/pcm-player.js

@@ -0,0 +1,130 @@
// Streaming PCM player on top of Web Audio: feed() accumulates raw samples
// and a timer periodically flush()es them into scheduled AudioBuffers.
function PCMPlayer(option) {
    this.init(option);
}

// Merge user options with defaults and set up the audio pipeline.
// option: { encoding: '8bitInt'|'16bitInt'|'32bitInt'|'32bitFloat',
//           channels, sampleRate, flushingTime (ms between flushes) }
PCMPlayer.prototype.init = function (option) {
    var defaults = {
        encoding: '16bitInt',
        channels: 1,
        sampleRate: 8000,
        flushingTime: 1000
    };
    this.option = Object.assign({}, defaults, option);
    this.samples = new Float32Array(); // queued samples, normalized to [-1, 1]
    this.flush = this.flush.bind(this);
    this.interval = setInterval(this.flush, this.option.flushingTime);
    this.maxValue = this.getMaxValue();
    this.typedArray = this.getTypedArray();
    this.createContext();
};

// Full-scale value for the configured encoding, used to normalize samples.
PCMPlayer.prototype.getMaxValue = function () {
    var encodings = {
        '8bitInt': 128,
        '16bitInt': 32768,
        '32bitInt': 2147483648,
        '32bitFloat': 1
    }

    return encodings[this.option.encoding] ? encodings[this.option.encoding] : encodings['16bitInt'];
};

// Typed-array constructor matching the configured encoding.
PCMPlayer.prototype.getTypedArray = function () {
    var typedArrays = {
        '8bitInt': Int8Array,
        '16bitInt': Int16Array,
        '32bitInt': Int32Array,
        '32bitFloat': Float32Array
    }

    return typedArrays[this.option.encoding] ? typedArrays[this.option.encoding] : typedArrays['16bitInt'];
};

// Create the AudioContext and master gain node. On touch devices the context
// starts suspended, so resume it on the first touch.
PCMPlayer.prototype.createContext = function () {
    this.audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    console.log(this.audioCtx);
    if (this.audioCtx.state === 'suspended' && 'ontouchstart' in window) {
        var unlock = () => { this.audioCtx.resume(); };
        document.body.addEventListener('touchstart', unlock, false);
    }
    this.gainNode = this.audioCtx.createGain();
    this.gainNode.gain.value = 1;
    this.gainNode.connect(this.audioCtx.destination);
    this.startTime = this.audioCtx.currentTime;
};

// Loose typed-array check: anything with a byteLength backed by an ArrayBuffer.
PCMPlayer.prototype.isTypedArray = function (data) {
    return (data.byteLength && data.buffer && data.buffer.constructor == ArrayBuffer);
};

// Append raw PCM bytes (matching the configured encoding) to the sample queue.
PCMPlayer.prototype.feed = function (data) {
    if (!this.isTypedArray(data)) return;
    data = this.getFormatedValue(data);
    var tmp = new Float32Array(this.samples.length + data.length);
    tmp.set(this.samples, 0);
    tmp.set(data, this.samples.length);
    this.samples = tmp;
};

// Reinterpret raw bytes as the configured sample type and normalize to floats.
PCMPlayer.prototype.getFormatedValue = function (data) {
    var data = new this.typedArray(data.buffer),
        float32 = new Float32Array(data.length),
        i;

    for (i = 0; i < data.length; i++) {
        float32[i] = data[i] / this.maxValue;
    }
    return float32;
};

// Set the master volume (linear gain).
PCMPlayer.prototype.volume = function (volume) {
    this.gainNode.gain.value = volume;
};

// Stop the flush timer and tear down the AudioContext.
PCMPlayer.prototype.destroy = function () {
    if (this.interval) {
        clearInterval(this.interval);
    }
    this.samples = null;
    this.audioCtx.close();
    this.audioCtx = null;
};

// Move the queued samples into an AudioBuffer (de-interleaving channels),
// apply a 50-sample fade-in/out to avoid clicks, and schedule the buffer
// right after the previously scheduled audio.
PCMPlayer.prototype.flush = function () {
    if (!this.samples.length) return;
    var bufferSource = this.audioCtx.createBufferSource(),
        length = this.samples.length / this.option.channels,
        audioBuffer = this.audioCtx.createBuffer(this.option.channels, length, this.option.sampleRate),
        audioData,
        channel,
        offset,
        i,
        decrement;

    for (channel = 0; channel < this.option.channels; channel++) {
        audioData = audioBuffer.getChannelData(channel);
        offset = channel;
        decrement = 50;
        for (i = 0; i < length; i++) {
            audioData[i] = this.samples[offset];
            /* fadein */
            if (i < 50) {
                audioData[i] = (audioData[i] * i) / 50;
            }
            /* fadeout*/
            if (i >= (length - 51)) {
                audioData[i] = (audioData[i] * decrement--) / 50;
            }
            offset += this.option.channels;
        }
    }

    if (this.startTime < this.audioCtx.currentTime) {
        this.startTime = this.audioCtx.currentTime;
    }
    bufferSource.buffer = audioBuffer;
    bufferSource.connect(this.gainNode);
    bufferSource.start(this.startTime);
    this.startTime += audioBuffer.duration;
    this.samples = new Float32Array();
};

File diff suppressed because it is too large
+ 1 - 0
static/webRtcYJ/pcm-player.min.js


File diff suppressed because it is too large
+ 9103 - 0
static/webRtcYJ/rtcEngine.min.js


+ 209 - 0
static/webRtcYJ/spsParser.js

@@ -0,0 +1,209 @@
//https://blog.csdn.net/lizhijian21/article/details/80982403
// Thin alias for Math.ceil, kept for readability in the bit-parsing code.
function ceil(val) {
  return Math.ceil(val);
}
+
// Read `bitCount` bits (MSB first) from input.data starting at bit position
// input.index, advancing the cursor. Returns the bits as an unsigned integer.
function u(bitCount, input) {
  var value = 0;
  for (var n = 0; n < bitCount; n++) {
    var byteIdx = Math.floor(input.index / 8);
    var mask = 0x80 >> (input.index % 8);
    value = (value << 1) + ((input.data[byteIdx] & mask) ? 1 : 0);
    input.index++;
  }
  return value;
}
+
/* Unsigned Exp-Golomb decode (UE).
 * A code word is [M zeros][1][M info bits]; the decoded value is
 * (1 << M) - 1 + info.
 * input: {data, index} bit cursor (index advances); len: available bytes. */
function ue(input, len) {
  let zeroNum = 0;
  // Count leading zeros up to the terminating 1 bit (or end of data).
  while (input.index < len * 8) {
    if (input.data[Math.floor(input.index / 8)] & (0x80 >> (input.index % 8))) {
      break;
    }

    zeroNum++;
    input.index++;
  }

  input.index++; // skip the terminating 1

  let ret = 0;
  // fix: the loop counter was an implicit global; declare it locally
  for (let i = 0; i < zeroNum; i++) {
    ret <<= 1;
    if (input.data[Math.floor(input.index / 8)] & (0x80 >> (input.index % 8))) {
      ret += 1;
    }

    input.index++;
  }

  return (1 << zeroNum) - 1 + ret;
}
+
// Signed Exp-Golomb decode (SE): map the unsigned code k to ceil(k / 2),
// negated when k is even.
function se(input, len) {
  const k = ue(input, len);
  const magnitude = ceil(k / 2);
  return k % 2 == 0 ? -magnitude : magnitude;
}
+
+
/* Parse an H.264 SPS NAL unit (prefixed with its 00 00 00 01 start code) and
 * return the decoded picture size as { width, height }, with frame cropping
 * applied. Returns undefined when the NAL is not an SPS (nal_unit_type != 7).
 * Reference: ITU-T H.264, seq_parameter_set_data() syntax. */
function spsParser(buf) {
  buf = buf.slice(4); // strip the 00 00 00 01 start code
  let len = buf.length;

  // Shared bit cursor consumed by u()/ue()/se().
  let input = {
    data: buf,
    index: 0,
  };

  let forbidden_zero_bit = u(1, input);
  let nal_ref_idc = u(2, input);
  let nal_unit_type = u(5, input);
  let chroma_format_idc;

  if (nal_unit_type != 7) {
    return; // not an SPS
  }

  let profile_idc = u(8, input);
  let constraint_set0_flag = u(1, input);
  let constraint_set1_flag = u(1, input);
  let constraint_set2_flag = u(1, input);
  let constraint_set3_flag = u(1, input);
  let constraint_set4_flag = u(1, input);
  let constraint_set5_flag = u(1, input);

  let reserved_zero_2bits = u(2, input);
  let level_idc = u(8, input);
  let seq_parameter_set_id = ue(input, len);

  // fix: the first comparison used bitwise `|` instead of `||` (same result
  // here by accident, but clearly a typo)
  if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || profile_idc == 144) {
    chroma_format_idc = ue(input, len);

    if (chroma_format_idc == 3) {
      var residual_colour_transform_flag = u(1, input);
    }

    let bit_depth_luma_minus8 = ue(input, len);
    let bit_depth_chroma_minus8 = ue(input, len);
    let qpprime_y_zero_transform_bypass_flag = u(1, input);
    let seq_scaling_matrix_present_flag = u(1, input);

    let seq_scaling_list_present_flag = new Uint8Array(8);
    if (seq_scaling_matrix_present_flag) {
      for (var i = 0; i < 8; i++) {
        seq_scaling_list_present_flag[i] = u(1, input);
      }
    }
  }

  let log2_max_frame_num_minus4 = ue(input, len);
  let pic_order_cnt_type = ue(input, len);

  if (pic_order_cnt_type == 0) {
    // fix: this used to assign an implicit global
    let log2_max_pic_order_cnt_lsb_minus4 = ue(input, len);
  } else if (pic_order_cnt_type == 1) {
    let delta_pic_order_always_zero_flag = u(1, input);
    let offset_for_non_ref_pic = se(input, len);
    let offset_for_top_to_bottom_field = se(input, len);
    let num_ref_frames_in_pic_order_cnt_cycle = ue(input, len);

    // fix: `new Uint8Array[n]` indexed the constructor (undefined) and threw
    // a TypeError whenever pic_order_cnt_type == 1; a plain array also keeps
    // the signed se() values intact
    let offset_for_ref_frame = new Array(num_ref_frames_in_pic_order_cnt_cycle);

    for (var i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++)
      offset_for_ref_frame[i] = se(input, len);
  }

  let num_ref_frames = ue(input, len);
  let gaps_in_frame_num_value_allowed_flag = u(1, input);
  let pic_width_in_mbs_minus1 = ue(input, len);
  let pic_height_in_map_units_minus1 = ue(input, len);

  // Size in 16-pixel macroblocks; cropping below may trim it.
  let width = (pic_width_in_mbs_minus1 + 1) * 16;
  let height = (pic_height_in_map_units_minus1 + 1) * 16;

  let frame_mbs_only_flag = u(1, input);

  if (!frame_mbs_only_flag) {
    u(1, input); // mb_adaptive_frame_field_flag, unused here
  }

  let direct_8x8_inference_flag = u(1, input);
  let frame_cropping_flag = u(1, input);

  if (frame_cropping_flag) {
    let frame_crop_left_offset = ue(input, len);
    let frame_crop_right_offset = ue(input, len);
    let frame_crop_top_offset = ue(input, len);
    let frame_crop_bottom_offset = ue(input, len);

    let crop_unit_x = 1;
    let crop_unit_y = 2 - frame_mbs_only_flag;

    if (chroma_format_idc == 1) { // 4:2:0
      crop_unit_x = 2;
      crop_unit_y = 2 * (2 - frame_mbs_only_flag);
    } else if (chroma_format_idc == 2) { // 4:2:2
      crop_unit_x = 2;
      crop_unit_y = 2 - frame_mbs_only_flag;
    }

    width -= crop_unit_x * (frame_crop_left_offset + frame_crop_right_offset);
    height -= crop_unit_y * (frame_crop_top_offset + frame_crop_bottom_offset);
  }

  return {
    width: width,
    height: height,
  };
}
+
+

+ 99 - 0
static/webRtcYJ/timer.js

@@ -0,0 +1,99 @@
+// function handleQuit() {
+//   if (userCardType === 1) {
+//     $.actions({
+//       actions: [{
+//         text: "退出",
+//         onClick: function () {
+//           decodeWoker.postMessage('close');
+//           decodeWoker.terminate();
+//           quit();
+//         }
+//       }, {
+//         text: "退出并下机",
+//         onClick: function () {
+//           downline();
+//         }
+//       }]
+//     });
+//   } else {
+//     decodeWoker.postMessage('close');
+//     decodeWoker.terminate();
+//     quit();
+//   }
+// }
+
// Dispatch the quit-menu action: 'quit' leaves the session, 'dormant' leaves
// and releases the cloud device; anything else is ignored.
function handleQuit(action = "quit") {
  if (action === 'quit') return exit();
  if (action === 'dormant') return downline();
}
+
// Leave the session without releasing the cloud device: stop the decode
// worker, then run the shared quit flow.
function exit() {
  decodeWoker.postMessage('close');
  decodeWoker.terminate();
  quit();
}
+
// Leave the session AND release the cloud device: ask the backend to take
// the card offline, then tear down the worker and local session state.
function downline() {
  $.ajax({
    url: baseUrl + '/api/resources/yearMember/downline?userCardId=' + userCardId,
    headers: {
      Authorization: token
    },
    type: 'get',
    dataType: 'json',
    success: function (res) {
      if (res.status === 0) {
        // Backend confirmed: stop the decoder and drop the cached session.
        decodeWoker.postMessage('close');
        decodeWoker.terminate();
        deleteDB(db, storeName, userCardId)
        quit();
      } else {
        $.toast(res.msg, 'text');
      }
    },
  });
}
+
// Format a duration in seconds as "HH:MM:SS"; hours are not capped at 24.
function residueTimeStamp(value) {
  let seconds = value;
  let minutes = 0;
  let hours = 0;
  if (seconds > 59) {
    minutes = parseInt(seconds / 60);
    seconds = parseInt(seconds % 60);
  }
  if (minutes > 59) {
    hours = parseInt(minutes / 60);
    minutes = parseInt(minutes % 60);
  }
  const pad = (n) => (n < 10 ? '0' + n : '' + n);
  return pad(hours) + ':' + pad(minutes) + ':' + pad(seconds);
}
// Dismiss the remaining-time reminder: tell the backend to stop reminding,
// then stop the local countdown and hide the banner.
// NOTE(review): `async: false` blocks the UI thread; presumably intentional
// to keep the banner state consistent — confirm before changing.
function handleclose() {
  $.ajax({
    url: baseUrl + '/api/resources/yearMember/closeRemind?userCardId=' + userCardId,
    headers: {
      Authorization: token,
    },
    type: 'get',
    dataType: 'json',
    async: false,
    success: function (res) {
      if (res.status === 0) {
        clearInterval(residueTimer);
        $("#countView").hide();
      } else {
        $.toast(res.msg, 'text');
      }
    },
  });
}
+$.toast.prototype.defaults.duration = 4000

+ 148 - 0
static/webRtcYJ/webgl.js

@@ -0,0 +1,148 @@
// Wraps a single WebGL 2D texture used as one plane (Y, U or V) of a YUV frame.
// Creates the texture and configures linear filtering with edge clamping.
function Texture(gl) {
    this.gl = gl;
    this.texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, this.texture);

    var settings = [
        [gl.TEXTURE_MAG_FILTER, gl.LINEAR],
        [gl.TEXTURE_MIN_FILTER, gl.LINEAR],
        [gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE],
        [gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE]
    ];
    for (var i = 0; i < settings.length; i++) {
        gl.texParameteri(gl.TEXTURE_2D, settings[i][0], settings[i][1]);
    }
}

// Binds this texture to texture unit n (0-2) and points the sampler
// uniform `name` in `program` at that unit.
Texture.prototype.bind = function (n, program, name) {
    var ctx = this.gl;
    var units = [ctx.TEXTURE0, ctx.TEXTURE1, ctx.TEXTURE2];
    ctx.activeTexture(units[n]);
    ctx.bindTexture(ctx.TEXTURE_2D, this.texture);
    ctx.uniform1i(ctx.getUniformLocation(program, name), n);
};

// Uploads a width x height single-channel (LUMINANCE) byte plane into the texture.
Texture.prototype.fill = function (width, height, data) {
    var ctx = this.gl;
    ctx.bindTexture(ctx.TEXTURE_2D, this.texture);
    ctx.texImage2D(ctx.TEXTURE_2D, 0, ctx.LUMINANCE, width, height, 0, ctx.LUMINANCE, ctx.UNSIGNED_BYTE, data);
};
+
// Renders raw YUV420 frames onto a canvas via WebGL: three LUMINANCE
// textures (one per plane) are combined to RGB in the fragment shader.
function WebGLPlayer(canvas, options) {
    this.canvas = canvas;
    // Fall back to the legacy context name for older browsers.
    this.gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
    this.initGL(options);
}

// Compiles and links the YUV->RGB shader program, sets up a full-screen
// quad, and creates the Y/U/V plane textures on the context.
WebGLPlayer.prototype.initGL = function (options) {
    if (!this.gl) {
        console.log("[ER] WebGL not supported.");
        return;
    }

    var gl = this.gl;
    // Byte-tight row uploads: luma/chroma planes have no row padding.
    gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
    var program = gl.createProgram();
    var vertexShaderSource = [
        "attribute highp vec4 aVertexPosition;",
        "attribute vec2 aTextureCoord;",
        "varying highp vec2 vTextureCoord;",
        "void main(void) {",
        " gl_Position = aVertexPosition;",
        " vTextureCoord = aTextureCoord;",
        "}"
    ].join("\n");
    var vertexShader = gl.createShader(gl.VERTEX_SHADER);
    gl.shaderSource(vertexShader, vertexShaderSource);
    gl.compileShader(vertexShader);
    // BT.601 limited-range YUV -> RGB conversion matrix, applied row-vector style.
    var fragmentShaderSource = [
        "precision highp float;",
        "varying lowp vec2 vTextureCoord;",
        "uniform sampler2D YTexture;",
        "uniform sampler2D UTexture;",
        "uniform sampler2D VTexture;",
        "const mat4 YUV2RGB = mat4",
        "(",
        " 1.1643828125, 0, 1.59602734375, -.87078515625,",
        " 1.1643828125, -.39176171875, -.81296875, .52959375,",
        " 1.1643828125, 2.017234375, 0, -1.081390625,",
        " 0, 0, 0, 1",
        ");",
        "void main(void) {",
        " gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;",
        "}"
    ].join("\n");

    var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
    gl.shaderSource(fragmentShader, fragmentShaderSource);
    gl.compileShader(fragmentShader);
    // NOTE(review): COMPILE_STATUS is never checked for either shader;
    // a compile error only surfaces as the link failure logged below.
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);
    gl.useProgram(program);
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        console.log("[ER] Shader link failed.");
    }
    var vertexPositionAttribute = gl.getAttribLocation(program, "aVertexPosition");
    gl.enableVertexAttribArray(vertexPositionAttribute);
    var textureCoordAttribute = gl.getAttribLocation(program, "aTextureCoord");
    gl.enableVertexAttribArray(textureCoordAttribute);

    // Full-screen quad as a 4-vertex triangle strip, with texture
    // coordinates flipped vertically to match image row order.
    var verticesBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, verticesBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, -1.0, -1.0, 0.0]), gl.STATIC_DRAW);
    gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
    var texCoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0]), gl.STATIC_DRAW);
    gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);

    // One texture per plane, bound to units 0/1/2 and wired to the samplers.
    // (Stored on the gl object so renderFrame can reach them.)
    gl.y = new Texture(gl);
    gl.u = new Texture(gl);
    gl.v = new Texture(gl);
    gl.y.bind(0, program, "YTexture");
    gl.u.bind(1, program, "UTexture");
    gl.v.bind(2, program, "VTexture");
};

// Draws one frame. videoFrame is a packed YUV420 byte buffer:
// Y plane at [0, uOffset), U plane at [uOffset, uOffset + vOffset),
// V plane for the remainder. Chroma planes are half resolution per axis.
WebGLPlayer.prototype.renderFrame = function (videoFrame, width, height, uOffset, vOffset) {
    if (!this.gl) {
        console.log("[ER] Render frame failed due to WebGL not supported.");
        return;
    }

    var gl = this.gl;
    gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
    gl.clearColor(0.0, 0.0, 0.0, 0.0);
    gl.clear(gl.COLOR_BUFFER_BIT);

    gl.y.fill(width, height, videoFrame.subarray(0, uOffset));
    gl.u.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset, uOffset + vOffset));
    gl.v.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset + vOffset, videoFrame.length));

    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};

// Requests fullscreen on the canvas, preferring the standard API and
// falling back to legacy vendor-prefixed variants.
WebGLPlayer.prototype.fullscreen = function () {
    var canvas = this.canvas;
    if (canvas.requestFullscreen) {
        // BUG FIX: the original tested `RequestFullScreen` (capital R), a
        // method no browser exposes, so the standard API was never used.
        canvas.requestFullscreen();
    } else if (canvas.webkitRequestFullScreen) {
        canvas.webkitRequestFullScreen();
    } else if (canvas.mozRequestFullScreen) {
        canvas.mozRequestFullScreen();
    } else if (canvas.msRequestFullscreen) {
        canvas.msRequestFullscreen();
    } else {
        alert("This browser doesn't support fullscreen");
    }
};

// Exits fullscreen via the standard API or a vendor-prefixed fallback.
WebGLPlayer.prototype.exitfullscreen = function () {
    if (document.exitFullscreen) {
        document.exitFullscreen();
    } else if (document.webkitExitFullscreen) {
        document.webkitExitFullscreen();
    } else if (document.mozCancelFullScreen) {
        document.mozCancelFullScreen();
    } else if (document.msExitFullscreen) {
        document.msExitFullscreen();
    } else {
        alert("Exit fullscreen doesn't work");
    }
};

+ 87 - 0
static/webRtcYJ/websocket.js

@@ -0,0 +1,87 @@
// WebSocket relay Web Worker: connects to the device stream socket and
// shuttles binary messages between it and the main thread.
self.importScripts('helper.js'); // NOTE(review): presumably supplies VerifyCode used below — confirm
var parameters = GetRequest(); // query params appended to this worker's script URL
var cardToken = parameters['cardToken'];
cardToken = cardToken && decodeURIComponent(cardToken);
var errorTime = 0; // accumulated reconnect delay (ms); reset on successful open
var socketURL = decodeURIComponent(parameters['socketURL']);
var intervaler; // heartbeat interval id
doConnect();
+
// Returns a wrapper around `fn` that runs it at most once per `delay` ms
// (trailing-edge: the call is deferred by `delay`). Side effect: each
// accepted call adds `delay` to the module-level `errorTime` accumulator,
// which the reconnect logic uses as a crude back-off budget.
function throttle(fn, delay) {
  var flag = true;
  // BUG FIX: `timer` was assigned without declaration — an implicit global
  // that throws in strict mode and leaks state between throttled functions.
  var timer;
  return () => {
    if (!flag) return;
    flag = false;
    errorTime += delay;
    timer = setTimeout(() => {
      fn();
      flag = true;
    }, delay);
  };
}
+
// Opens the WebSocket to `socketURL`, authenticates with the card token,
// heart-beats every 3s, relays payloads to/from the main thread, and
// reconnects (throttled) after abnormal closure.
function doConnect() {
  var ws = new WebSocket(socketURL);
  ws.binaryType = 'arraybuffer';

  // Heartbeat: ping while OPEN; once the socket leaves OPEN, stop and
  // tell the main thread the link is gone.
  intervaler = setInterval(() => {
    if (ws.readyState === 1) {
      ws.send('ping');
    } else {
      clearInterval(intervaler);
      self.postMessage('close');
    }
  }, 3000);

  var abnormal = false; // marks a local error so the close handler reconnects

  ws.onerror = function () {
    // BUG FIX: the original called ws.close(1006). 1006 is a reserved code —
    // close() only accepts 1000 or 3000-4999 and throws on anything else.
    // Flag the failure and close without a code instead.
    abnormal = true;
    ws.close();
  };

  ws.addEventListener('close', function (e) {
    // 1006 is the code the browser itself reports for abnormal closure.
    if (abnormal || e.code === 1006) {
      clearInterval(intervaler);
      // BUG FIX: the throttled reconnect function was created but never
      // invoked, so the worker never actually reconnected.
      throttle(doConnect, 100)();
      if (errorTime > 1000) {
        self.postMessage('close');
      }
    }
  });

  ws.addEventListener('open', function (event) {
    var verifyBuffer = VerifyCode('RK3923C1201900139', cardToken);
    ws.send(verifyBuffer);
    errorTime = 0; // link is healthy again; reset the back-off budget
  });

  ws.addEventListener('message', function (event) {
    // Both original branches (first byte 0xff or not) forwarded the payload
    // unchanged, so forward unconditionally.
    self.postMessage(new Uint8Array(event.data));
  });

  // NOTE(review): this listener is registered on every (re)connect and is
  // never removed, so handlers accumulate across reconnects — consider
  // hoisting it to module scope.
  self.addEventListener(
    'message',
    function (e) {
      // BUG FIX: the handler receives a MessageEvent; the payload is e.data.
      // Comparing the event object itself to 'close' never matched.
      if (e.data === 'close') {
        clearInterval(intervaler);
        ws.close();
      } else {
        ws.send(e.data);
      }
    },
    false,
  );
}
+
// Parses this worker's URL query string (location.search) into a plain
// { key: value } map. Values are returned raw (still URL-encoded); callers
// decode the ones they need. A key without '=' maps to undefined.
function GetRequest() {
  var url = location.search;
  var obj = new Object();
  if (url.indexOf('?') != -1) {
    var str = url.substr(1);
    // BUG FIX: `strs` was assigned without declaration — an implicit
    // global that throws in strict mode.
    var pairs = str.split('&');
    for (var i = 0; i < pairs.length; i++) {
      var eq = pairs[i].indexOf('=');
      if (eq === -1) {
        // Parity with the old split('=')[1] behavior for bare keys.
        obj[pairs[i]] = undefined;
      } else {
        // BUG FIX: split('=')[1] truncated values containing '='
        // (e.g. base64-padded tokens); split on the first '=' only.
        obj[pairs[i].slice(0, eq)] = pairs[i].slice(eq + 1);
      }
    }
  }
  return obj;
}