wuyongxiang преди 4 години
родител
ревизия
f32892a948

+ 11 - 11
screenAndroid/WXdraw.js

@@ -115,7 +115,7 @@ wsss.onopen = function() {
 		"event": "bitRate"
 	}
 	// wsss.send(JSON.stringify(bitRate));
-	wsss.send(ExexuteMove(JSON.stringify(bitRate))) 
+	wsss.send(ExexuteMove(JSON.stringify(bitRate)),data.sn) 
 };
 wsss.onmessage = function(event) {
 	// console.log("onMessage==============", event);
@@ -165,7 +165,7 @@ $(".botmat1img").on("click", function() {
 		// 	"event": "keyCode"
 		// }
 		// console.log("打印主页json", JSON.stringify(bitRate))
-		wsss.send(ExexuteKeyBoard(3));
+		wsss.send(ExexuteKeyBoard(3),data.sn);
 		// console.log("打印主页json", wsss)
 	} else if (codes == "return") {
 		// var bitRate = {
@@ -175,7 +175,7 @@ $(".botmat1img").on("click", function() {
 		// 	"event": "keyCode"
 		// }
 		// wsss.send(JSON.stringify(bitRate));
-		wsss.send(ExexuteKeyBoard(4));
+		wsss.send(ExexuteKeyBoard(4),data.sn);
 	} else if (codes == "gengduo") {
 		// var bitRate = {
 		// 	"data": {
@@ -184,7 +184,7 @@ $(".botmat1img").on("click", function() {
 		// 	"event": "keyCode"
 		// }
 		// wsss.send(JSON.stringify(bitRate));
-		wsss.send(ExexuteKeyBoard(187));
+		wsss.send(ExexuteKeyBoard(187),data.sn);
 	}
 
 })
@@ -198,7 +198,7 @@ $(".PictureQuality").on("click", function() {
 		},
 		"event": "bitRate"
 	}
-	wsss.send(JSON.stringify(bitRate));
+	wsss.send(ExexuteMove(JSON.stringify(bitRate)),data.sn) 
 	console.log(id)
 })
 
@@ -247,7 +247,7 @@ var draw_graph = function(graphType, obj) {
 				}
 			}
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 			console.log("鼠标按下>>>", ping)
 		} else {
 			let ping
@@ -267,7 +267,7 @@ var draw_graph = function(graphType, obj) {
 
 			}
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 			console.log("鼠标按下>>>", ping)
 		}
 
@@ -300,7 +300,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 
 		} else {
 			let ping
@@ -320,7 +320,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 		}
 
 		canDraw = false;
@@ -361,7 +361,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 		} else {
 			let ping
 			for (let i = 0; i < touchfor.length; i++) {
@@ -381,7 +381,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 		}
 
 

+ 55 - 32
screenAndroid/WXtrialInterface.html

@@ -63,11 +63,13 @@
 				<!-- <video id="video1" muted="muted" x5-video-orientation="landscape" playsinline="true" autoplay="true"
 				 webkit-playsinline="true" x5-playsinline="true" x5-video-player-type="h5" x5-video-player-fullscreen="false" style="pointer-events: none;"></video> -->
 				<!-- <div id="box"> -->
-				<video muted="muted" x5-video-orientation="landscape" playsinline="true" autoplay="true" webkit-playsinline="true"
-				 x5-playsinline="true" x5-video-player-type="h5" x5-video-player-fullscreen="false" style="pointer-events: none;width: 100%;height:100%"
-				 disablePictureInPicture="true" autoplay poster="images/loader-thumb.jpg" id="playerVideo"></video>
+				<video muted="muted" x5-video-orientation="landscape" playsinline="true" autoplay="true"
+					webkit-playsinline="true" x5-playsinline="true" x5-video-player-type="h5"
+					x5-video-player-fullscreen="false" style="pointer-events: none;width: 100%;height:100%"
+					disablePictureInPicture="true" autoplay poster="images/loader-thumb.jpg" id="playerVideo"></video>
 				<div id="box"></div>
-				<audio preload="auto" autoplay controls poster="images/loader-thumb.jpg" id="audioPlayer" style="position: absolute;top: 0;width: 0;height: 0;"></audio>
+				<audio preload="auto" autoplay controls poster="images/loader-thumb.jpg" id="audioPlayer"
+					style="position: absolute;top: 0;width: 0;height: 0;"></audio>
 			</div>
 
 
@@ -144,6 +146,7 @@
 		<script type="text/javascript" src="https://res.wx.qq.com/open/js/jweixin-1.3.2.js"></script>
 		<script src="../static/js/jquery-1.11.0.min.js"></script>
 		<script src="helper.js"></script>
+		<script type="text/javascript" src="pcm-player.js"></script>
 		<!--  音频
 		<!-- <script src="../static/js/classlist.js"></script>
 		<script src="../static/js/players.js"></script>
@@ -227,43 +230,52 @@
 				isFeed = false;
 			});
 
-			// myAudio.addEventListener('canplay', function() {
-			// 	console.log("缓冲区大小 %f", myAudio.buffered.end(0) - myAudio.buffered.start(0));
-			// });
+			myAudio.addEventListener('canplay', function() {
+				console.log("缓冲区大小 %f", myAudio.buffered.end(0) - myAudio.buffered.start(0));
+			});
+
+			var decodeCount = 1;
+			var isFinish = false;
+			var player = new PCMPlayer({
+				encoding: '16bitInt',
+				channels: 2,
+				sampleRate: 44100,
+				flushingTime: 22,
+				debug: false
+			});
+
+			Module = {};
+			Module.onRuntimeInitialized = function() {
+				console.log("Wasm 加载成功!")
+				isFinish = true;
+			}
 
-			/*function decodeAAC(data)
-			{
-				var retPtr = Module._malloc(4 * 5 * 1024);//接收的数据
-				var inputPtr = Module._malloc(4 * data.length);//输入数据
-				
-				for( i =0;i < data.length;i++)
-				{
-					Module.HEAPU8[(inputPtr)+i] = data[i];//转换为堆数据
+			function decodeAAC(data) {
+				var retPtr = Module._malloc(4 * 5 * 1024); //接收的数据
+				var inputPtr = Module._malloc(4 * data.length); //输入数据
+
+				for (i = 0; i < data.length; i++) {
+					Module.HEAPU8[(inputPtr) + i] = data[i]; //转换为堆数据
 				}
-				
+
 				var pcmLen = Module._feedData(retPtr, inputPtr, data.length);
-				
-				if(pcmLen > 0)
-				{
+
+				if (pcmLen >= 0) {
 					//console.log("%d帧 aac 解码成功, %d", decodeCount, pcmLen);
-					var pcmData = new Uint8Array(pcmLen);		
-					for(i = 0;i < pcmLen;i++)
-					{
-						pcmData[i] = Module.HEAPU8[(retPtr)+i]
+					var pcmData = new Uint8Array(pcmLen);
+					for (i = 0; i < pcmLen; i++) {
+						pcmData[i] = Module.HEAPU8[(retPtr) + i]
 					}
-					
+
 					player.feed(pcmData);
-				}
-				else
-				{
+				} else {
 					console.log("%d帧 aac 解码失败, %d", decodeCount, pcmLen);
 				}
-				
+
 				decodeCount++;
 				Module._free(inputPtr);
 				Module._free(retPtr);
-			}	*/
-
+			}
 
 			//解协议
 			function ParseProto(data) {
@@ -395,7 +407,15 @@
 						video: data.video,
 						duration: data.duration
 					};
+					var input = new Uint8Array(event.data);
+					// console.log("音频数据", input, input[0] == 0xff);
+					if (input[0] == 0xff) {
+						if (isFinish) {
+							var time = new Date().getTime();
+							decodeAAC(input);
+						}
 
+					} else {}
 					if (myAudio.readyState == 2) {
 						requestTime = new Date().getTime();
 						isEnough = false;
@@ -406,9 +426,11 @@
 						myAudio.play();
 						var time = new Date().getTime();
 						isEnough = true;
-						console.log("填满耗时 %d ms, 填充帧数 %d, 填充延迟 %d ms", time - requestTime, requestCount, requestCount * 23);
+						console.log("填满耗时 %d ms, 填充帧数 %d, 填充延迟 %d ms", time - requestTime, requestCount,
+							requestCount * 23);
 
-						console.log("----接收到启动 %d ms, 缓冲区 %f---", time - delayTime, myAudio.buffered.end(0) - myAudio.played.end(0));
+						console.log("----接收到启动 %d ms, 缓冲区 %f---", time - delayTime, myAudio.buffered.end(0) - myAudio
+							.played.end(0));
 					}
 
 					if (data.audio != null) //喂音频
@@ -971,6 +993,7 @@
 			}
 		</script>
 		<script type="text/javascript" src="jmuxer.js"></script>
+		<script type="text/javascript" src="aac.js"></script>
 	</body>
 
 </html>

+ 130 - 0
screenAndroid/aac.c

@@ -0,0 +1,130 @@
+#include <memory.h>
+#include <stdlib.h>
+#include "faad.h"
+#include <stdbool.h>
+#include <string.h>
+#include <emscripten.h>
+#include <stdio.h>
+#include <sys/time.h>
+#include <sys/timeb.h>
+#include <unistd.h>
+
+bool hasInit = false;
+
+NeAACDecHandle decoder = 0;
+NeAACDecFrameInfo frame_info;
+
+void PrintArry(unsigned char *buffer, unsigned int size)
+{
+	int i;
+	char data[1024*1024];
+	
+	for(i = 0;i < size;i++)
+	{
+		data[i] = buffer[i];
+	}
+	
+	data[i + 1] = '\0';
+}
+
+int init_decoder(unsigned char* inBuffer, size_t size)
+{  
+    unsigned char channels;
+    unsigned long sampleRate;
+    
+    memset(&frame_info, 0, sizeof(frame_info));
+    decoder = NeAACDecOpen();
+    NeAACDecInit(decoder, inBuffer, size, &sampleRate, &channels);
+    //printf("init_decoder初始化完毕\n");
+    hasInit = true;
+    return 0;
+}
+
+int feedData(unsigned char* out_data, unsigned char* buffer, unsigned int size)
+{
+	int ret = 0;
+	
+    if (!hasInit)
+    {
+        init_decoder(buffer, size);
+    }
+
+    unsigned char *out_buffer = (unsigned char*)NeAACDecDecode(decoder, &frame_info, buffer, size);
+	//printf("frame_info.error %d\n",frame_info.error);
+
+    if (frame_info.error > 0)
+    {		
+        return frame_info.error;
+    }
+    else if(out_buffer && frame_info.samples > 0)//解码成功
+    {
+		ret = frame_info.samples * frame_info.channels;
+		for(int i = 0;i < ret;i++)
+		{
+			 out_data[i] = out_buffer[i];
+		}
+    }
+
+    return ret;
+}
+
+void destroyDecoder()
+{
+	hasInit = false;
+    NeAACDecClose(decoder);
+}
+
+/*bool GetFrame(FILE *file, unsigned char *input, int *len, int *pos)
+{
+    int readByte;
+    int frameLen = 0;
+    unsigned char buffer[6];
+
+    while ((readByte = fread(buffer, 1, 6, file)) > 0)
+    {
+        if ((buffer[0] == 0xff) && ((buffer[1] & 0xf0) == 0xf0))
+        {         
+            frameLen = ((buffer[3] & 0x3) << 11) | ((buffer[4]) << 3) | ((buffer[5]) >> 5);
+            printf("帧长度 %d\n", frameLen);
+            *len = frameLen;
+            fseek(file, *pos, SEEK_SET);
+            fread(input, 1, frameLen, file);
+            *pos = *pos + frameLen;
+            return true;
+        }
+        else
+        {
+            printf("位置没找对\n");
+        }
+    }
+
+    return false;
+}
+
+int main(int argc, char* argv[])
+{
+    int len;
+    int pos = 0;
+    unsigned char buffer[4096] = {0};
+    unsigned char OutBuffer[10240];
+    unsigned char* pcmData = OutBuffer;
+    FILE* file = fopen("test.aac", "rb");
+	if(!file)
+	{
+		printf("找不到AAC文件\n");
+		return -1;
+	}
+	
+    outFile = fopen("shchu.pcm", "wb+");
+
+    while (GetFrame(file, buffer, &len, &pos))
+    {
+        feed_data(pcmData, buffer, len);     
+    }
+
+    fclose(file);
+	fclose(outFile);
+	destroy_decoder();
+	printf("解码完毕\n");
+    return 0;
+}*/

Файловите разлики са ограничени, защото са твърде много
+ 1 - 1
screenAndroid/aac.js


BIN
screenAndroid/aac.wasm


+ 4 - 0
screenAndroid/buildFaad.sh

@@ -0,0 +1,4 @@
+#cd /home/github/faad2-2_10_0
+. bootstrap
+emconfigure ./configure --prefix=/usr --enable-shared --without-xmms --without-drm --without-mpeg4ip
+emmake make

+ 24 - 0
screenAndroid/buildwasm.sh

@@ -0,0 +1,24 @@
+export TOTAL_MEMORY=10485760
+rm *.js *.wasm
+
+export EXPORTED_FUNCTIONS="[  	\
+	'_malloc' \
+	,'_free' \
+	,'_destroyDecoder' \
+	,'_feedData'   \
+]"
+
+export LIBRARY_FUNCTIONS="[\
+    'malloc', \
+    'free'	  \
+]"
+
+#
+emcc aac.c  \
+-O3 \
+-s WASM=1 \
+-I /usr/local -lfaad -lm -L/usr/local/lib  \
+-s TOTAL_MEMORY=${TOTAL_MEMORY} \
+-s DEFAULT_LIBRARY_FUNCS_TO_INCLUDE="${LIBRARY_FUNCTIONS}" \
+-s EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS}" \
+-o aac.js

+ 997 - 0
screenAndroid/decoder.c

@@ -0,0 +1,997 @@
+#include <stdio.h>
+#include <sys/time.h>
+#include <sys/timeb.h>
+#include <unistd.h>
+
+typedef void(*VideoCallback)(unsigned char *buff, int size, double timestamp);
+typedef void(*AudioCallback)(unsigned char *buff, int size, double timestamp);
+typedef void(*RequestCallback)(int offset, int available);
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "libavcodec/avcodec.h"
+#include "libavformat/avformat.h"
+#include "libavutil/fifo.h"
+//#include "libswscale/swscale.h"
+
+#define MIN(X, Y)  ((X) < (Y) ? (X) : (Y))
+
+const int kCustomIoBufferSize = 32 * 1024;
+const int kInitialPcmBufferSize = 128 * 1024;
+const int kDefaultFifoSize = 1 * 1024 * 1024;
+const int kMaxFifoSize = 16 * 1024 * 1024;
+
+typedef enum ErrorCode {
+    kErrorCode_Success = 0,
+    kErrorCode_Invalid_Param,
+    kErrorCode_Invalid_State,
+    kErrorCode_Invalid_Data,
+    kErrorCode_Invalid_Format,
+    kErrorCode_NULL_Pointer,
+    kErrorCode_Open_File_Error,
+    kErrorCode_Eof,
+    kErrorCode_FFmpeg_Error,
+    kErrorCode_Old_Frame
+} ErrorCode;
+
+typedef enum LogLevel {
+    kLogLevel_None, //Not logging.
+    kLogLevel_Core, //Only logging core module(without ffmpeg).
+    kLogLevel_All   //Logging all, with ffmpeg.
+} LogLevel;
+
+typedef struct WebDecoder {
+    AVFormatContext *avformatContext;
+    AVCodecContext *videoCodecContext;
+    AVCodecContext *audioCodecContext;
+    AVFrame *avFrame;
+    int videoStreamIdx;
+    int audioStreamIdx;
+    VideoCallback videoCallback;
+    AudioCallback audioCallback;
+    RequestCallback requestCallback;
+    unsigned char *yuvBuffer;
+    //unsigned char *rgbBuffer;
+    unsigned char *pcmBuffer;
+    int currentPcmBufferSize;
+    int videoBufferSize;
+    int videoSize;
+    //struct SwsContext* swsCtx;
+    unsigned char *customIoBuffer;
+    FILE *fp;
+    char fileName[64];
+    int64_t fileSize;
+    int64_t fileReadPos;
+    int64_t fileWritePos;
+    int64_t lastRequestOffset;
+    double beginTimeOffset;
+    int accurateSeek;
+    // For streaming.
+    int isStream;
+    AVFifoBuffer *fifo;
+    int fifoSize;
+} WebDecoder;
+
+WebDecoder *decoder = NULL;
+LogLevel logLevel = kLogLevel_None;
+
+int getAailableDataSize();
+
+unsigned long getTickCount() {
+    struct timespec ts;
+    clock_gettime(CLOCK_MONOTONIC, &ts);
+    return ts.tv_sec * (unsigned long)1000 + ts.tv_nsec / 1000000;
+}
+
+void simpleLog(const char* format, ...) {
+    if (logLevel == kLogLevel_None) {
+        return;
+    }
+
+    char szBuffer[1024] = { 0 };
+    char szTime[32]		= { 0 };
+    char *p				= NULL;
+    int prefixLength	= 0;
+    const char *tag		= "Core";
+    struct tm tmTime;
+    struct timeb tb;
+
+    ftime(&tb);
+    localtime_r(&tb.time, &tmTime);
+
+    if (1) {
+        int tmYear		= tmTime.tm_year + 1900;
+        int tmMon		= tmTime.tm_mon + 1;
+        int tmMday		= tmTime.tm_mday;
+        int tmHour		= tmTime.tm_hour;
+        int tmMin		= tmTime.tm_min;
+        int tmSec		= tmTime.tm_sec;
+        int tmMillisec	= tb.millitm;
+        sprintf(szTime, "%d-%d-%d %d:%d:%d.%d", tmYear, tmMon, tmMday, tmHour, tmMin, tmSec, tmMillisec);
+    }
+
+    prefixLength = sprintf(szBuffer, "[%s][%s][DT] ", szTime, tag);
+    p = szBuffer + prefixLength;
+    
+    if (1) {
+        va_list ap;
+        va_start(ap, format);
+        vsnprintf(p, 1024 - prefixLength, format, ap);
+        va_end(ap);
+    }
+
+    printf("%s\n", szBuffer);
+}
+
+void ffmpegLogCallback(void* ptr, int level, const char* fmt, va_list vl) {
+    static int printPrefix	= 1;
+    static int count		= 0;
+    static char prev[1024]	= { 0 };
+    char line[1024]			= { 0 };
+    static int is_atty;
+    AVClass* avc = ptr ? *(AVClass**)ptr : NULL;
+    if (level > AV_LOG_DEBUG) {
+        return;
+    }
+
+    line[0] = 0;
+
+    if (printPrefix && avc) {
+        if (avc->parent_log_context_offset) {
+            AVClass** parent = *(AVClass***)(((uint8_t*)ptr) + avc->parent_log_context_offset);
+            if (parent && *parent) {
+                snprintf(line, sizeof(line), "[%s @ %p] ", (*parent)->item_name(parent), parent);
+            }
+        }
+        snprintf(line + strlen(line), sizeof(line) - strlen(line), "[%s @ %p] ", avc->item_name(ptr), ptr);
+    }
+
+    vsnprintf(line + strlen(line), sizeof(line) - strlen(line), fmt, vl);
+    line[strlen(line) + 1] = 0;
+    simpleLog("%s", line);
+}
+
+int openCodecContext(AVFormatContext *fmtCtx, enum AVMediaType type, int *streamIdx, AVCodecContext **decCtx) {
+    int ret = 0;
+    do {
+        int streamIndex		= -1;
+        AVStream *st		= NULL;
+        AVCodec *dec		= NULL;
+        AVDictionary *opts	= NULL;
+
+        ret = av_find_best_stream(fmtCtx, type, -1, -1, NULL, 0);
+        if (ret < 0) {
+            simpleLog("Could not find %s stream.", av_get_media_type_string(type));
+            break;
+        }
+
+        streamIndex = ret;
+        st = fmtCtx->streams[streamIndex];
+
+        dec = avcodec_find_decoder(st->codecpar->codec_id);
+        if (!dec) {
+            simpleLog("Failed to find %s codec %d.", av_get_media_type_string(type), st->codecpar->codec_id);
+            ret = AVERROR(EINVAL);
+            break;
+        }
+
+        *decCtx = avcodec_alloc_context3(dec);
+        if (!*decCtx) {
+            simpleLog("Failed to allocate the %s codec context.", av_get_media_type_string(type));
+            ret = AVERROR(ENOMEM);
+            break;
+        }
+
+        if ((ret = avcodec_parameters_to_context(*decCtx, st->codecpar)) != 0) {
+            simpleLog("Failed to copy %s codec parameters to decoder context.", av_get_media_type_string(type));
+            break;
+        }
+
+        av_dict_set(&opts, "refcounted_frames", "0", 0);
+
+        if ((ret = avcodec_open2(*decCtx, dec, NULL)) != 0) {
+            simpleLog("Failed to open %s codec.", av_get_media_type_string(type));
+            break;
+        }
+
+        *streamIdx = streamIndex;
+        avcodec_flush_buffers(*decCtx);
+    } while (0);
+
+    return ret;
+}
+
+void closeCodecContext(AVFormatContext *fmtCtx, AVCodecContext *decCtx, int streamIdx) {
+    do {
+        if (fmtCtx == NULL || decCtx == NULL) {
+            break;
+        }
+
+        if (streamIdx < 0 || streamIdx >= fmtCtx->nb_streams) {
+            break;
+        }
+
+        fmtCtx->streams[streamIdx]->discard = AVDISCARD_ALL;
+        avcodec_close(decCtx);
+    } while (0);
+}
+
+ErrorCode copyYuvData(AVFrame *frame, unsigned char *buffer, int width, int height) {
+    ErrorCode ret		= kErrorCode_Success;
+    unsigned char *src	= NULL;
+    unsigned char *dst	= buffer;
+    int i = 0;
+    do {
+        if (frame == NULL || buffer == NULL) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        if (!frame->data[0] || !frame->data[1] || !frame->data[2]) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        for (i = 0; i < height; i++) {
+            src = frame->data[0] + i * frame->linesize[0];
+            memcpy(dst, src, width);
+            dst += width;
+        }
+
+        for (i = 0; i < height / 2; i++) {
+            src = frame->data[1] + i * frame->linesize[1];
+            memcpy(dst, src, width / 2);
+            dst += width / 2;
+        }
+
+        for (i = 0; i < height / 2; i++) {
+            src = frame->data[2] + i * frame->linesize[2];
+            memcpy(dst, src, width / 2);
+            dst += width / 2;
+        }
+    } while (0);
+    return ret;	
+}
+
+/*
+ErrorCode yuv420pToRgb32(unsigned char *yuvBuff, unsigned char *rgbBuff, int width, int height) {
+    ErrorCode ret = kErrorCode_Success;
+    AVPicture yuvPicture, rgbPicture;
+    uint8_t *ptmp = NULL;
+    do {
+        if (yuvBuff == NULL || rgbBuff == NULL) {
+            ret = kErrorCode_Invalid_Param
+            break;
+        }
+
+        if (decoder == NULL || decoder->swsCtx == NULL) {
+            ret = kErrorCode_Invalid_Param
+            break;
+        }
+
+        
+        avpicture_fill(&yuvPicture, yuvBuff, AV_PIX_FMT_YUV420P, width, height);
+        avpicture_fill(&rgbPicture, rgbBuff, AV_PIX_FMT_RGB32, width, height);
+
+        ptmp = yuvPicture.data[1];
+        yuvPicture.data[1] = yuvPicture.data[2];
+        yuvPicture.data[2] = ptmp;
+
+        sws_scale(decoder->swsCtx, yuvPicture.data, yuvPicture.linesize, 0, height, rgbPicture.data, rgbPicture.linesize);
+    } while (0);
+    return ret;
+}
+*/
+
+int roundUp(int numToRound, int multiple) {
+    return (numToRound + multiple - 1) & -multiple;
+}
+
+ErrorCode processDecodedVideoFrame(AVFrame *frame) {
+    ErrorCode ret = kErrorCode_Success;
+    double timestamp = 0.0f;
+    do {
+        if (frame == NULL ||
+            decoder->videoCallback == NULL ||
+            decoder->yuvBuffer == NULL ||
+            decoder->videoBufferSize <= 0) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        if (decoder->videoCodecContext->pix_fmt != AV_PIX_FMT_YUV420P) {
+            simpleLog("Not YUV420P, but unsupported format %d.", decoder->videoCodecContext->pix_fmt);
+            ret = kErrorCode_Invalid_Format;
+            break;
+        }
+
+        ret = copyYuvData(frame, decoder->yuvBuffer, decoder->videoCodecContext->width, decoder->videoCodecContext->height);
+        if (ret != kErrorCode_Success) {
+            break;
+        }
+
+        /*
+        ret = yuv420pToRgb32(decoder->yuvBuffer, decoder->rgbBuffer, decoder->videoCodecContext->width, decoder->videoCodecContext->height);
+        if (ret != kErrorCode_Success) {
+            break;
+        }
+        */
+
+        timestamp = (double)frame->pts * av_q2d(decoder->avformatContext->streams[decoder->videoStreamIdx]->time_base);
+
+        if (decoder->accurateSeek && timestamp < decoder->beginTimeOffset) {
+            //simpleLog("video timestamp %lf < %lf", timestamp, decoder->beginTimeOffset);
+            ret = kErrorCode_Old_Frame;
+            break;
+        }
+        decoder->videoCallback(decoder->yuvBuffer, decoder->videoSize, timestamp);
+    } while (0);
+    return ret;
+}
+
+ErrorCode processDecodedAudioFrame(AVFrame *frame) {
+    ErrorCode ret       = kErrorCode_Success;
+    int sampleSize      = 0;
+    int audioDataSize   = 0;
+    int targetSize      = 0;
+    int offset          = 0;
+    int i               = 0;
+    int ch              = 0;
+    double timestamp    = 0.0f;
+    do {
+        if (frame == NULL) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        sampleSize = av_get_bytes_per_sample(decoder->audioCodecContext->sample_fmt);
+        if (sampleSize < 0) {
+            simpleLog("Failed to calculate data size.");
+            ret = kErrorCode_Invalid_Data;
+            break;
+        }
+
+        if (decoder->pcmBuffer == NULL) {
+            decoder->pcmBuffer = (unsigned char*)av_mallocz(kInitialPcmBufferSize);
+            decoder->currentPcmBufferSize = kInitialPcmBufferSize;
+            simpleLog("Initial PCM buffer size %d.", decoder->currentPcmBufferSize);
+        }
+
+        audioDataSize = frame->nb_samples * decoder->audioCodecContext->channels * sampleSize;
+        if (decoder->currentPcmBufferSize < audioDataSize) {
+            targetSize = roundUp(audioDataSize, 4);
+            simpleLog("Current PCM buffer size %d not sufficient for data size %d, round up to target %d.",
+                decoder->currentPcmBufferSize,
+                audioDataSize,
+                targetSize);
+            decoder->currentPcmBufferSize = targetSize;
+            av_free(decoder->pcmBuffer);
+            decoder->pcmBuffer = (unsigned char*)av_mallocz(decoder->currentPcmBufferSize);
+        }
+
+        for (i = 0; i < frame->nb_samples; i++) {
+            for (ch = 0; ch < decoder->audioCodecContext->channels; ch++) {
+                memcpy(decoder->pcmBuffer + offset, frame->data[ch] + sampleSize * i, sampleSize);
+                offset += sampleSize;
+            }
+        }
+
+        timestamp = (double)frame->pts * av_q2d(decoder->avformatContext->streams[decoder->audioStreamIdx]->time_base);
+
+        if (decoder->accurateSeek && timestamp < decoder->beginTimeOffset) {
+            //simpleLog("audio timestamp %lf < %lf", timestamp, decoder->beginTimeOffset);
+            ret = kErrorCode_Old_Frame;
+            break;
+        }
+        if (decoder->audioCallback != NULL) {
+            decoder->audioCallback(decoder->pcmBuffer, audioDataSize, timestamp);
+        }
+    } while (0);
+    return ret;
+}
+
+ErrorCode decodePacket(AVPacket *pkt, int *decodedLen) {
+    int ret = 0;
+    int isVideo = 0;
+    AVCodecContext *codecContext = NULL;
+
+    if (pkt == NULL || decodedLen == NULL) {
+        simpleLog("decodePacket invalid param.");
+        return kErrorCode_Invalid_Param;
+    }
+
+    *decodedLen = 0;
+
+    if (pkt->stream_index == decoder->videoStreamIdx) {
+        codecContext = decoder->videoCodecContext;
+        isVideo = 1;
+    } else if (pkt->stream_index == decoder->audioStreamIdx) {
+        codecContext = decoder->audioCodecContext;
+        isVideo = 0;
+    } else {
+        return kErrorCode_Invalid_Data;
+    }
+
+    ret = avcodec_send_packet(codecContext, pkt);
+    if (ret < 0) {
+        simpleLog("Error sending a packet for decoding %d.", ret);
+        return kErrorCode_FFmpeg_Error;
+    }
+
+    while (ret >= 0) {
+        ret = avcodec_receive_frame(codecContext, decoder->avFrame);
+        if (ret == AVERROR(EAGAIN)) {
+            return kErrorCode_Success;
+        } else if (ret == AVERROR_EOF) {
+            return kErrorCode_Eof;
+        } else if (ret < 0) {
+            simpleLog("Error during decoding %d.", ret);
+            return kErrorCode_FFmpeg_Error;
+        } else {
+            int r = isVideo ? processDecodedVideoFrame(decoder->avFrame) : processDecodedAudioFrame(decoder->avFrame);
+            if (r == kErrorCode_Old_Frame) {
+                return r;
+            }
+        }
+    }
+
+    *decodedLen = pkt->size;
+    return kErrorCode_Success;
+}
+
+int readFromFile(uint8_t *data, int len) {
+    //simpleLog("readFromFile %d.", len);
+    int32_t ret         = -1;
+    int availableBytes  = 0;
+    int canReadLen      = 0;
+    do {
+        if (decoder->fp == NULL) {
+            break;
+        }
+
+        availableBytes = decoder->fileWritePos - decoder->fileReadPos;
+        if (availableBytes <= 0) {
+            break;
+        }
+
+        fseek(decoder->fp, decoder->fileReadPos, SEEK_SET);
+        canReadLen = MIN(availableBytes, len);
+        fread(data, canReadLen, 1, decoder->fp);
+        decoder->fileReadPos += canReadLen;
+        ret = canReadLen;
+    } while (0);
+    //simpleLog("readFromFile ret %d.", ret);
+    return ret;
+}
+
+int readFromFifo(uint8_t *data, int len) {
+    //simpleLog("readFromFifo %d.", len);
+    int32_t ret         = -1;
+    int availableBytes  = 0;
+    int canReadLen      = 0;
+    do {
+        if (decoder->fifo == NULL) {
+            break;
+        }	
+
+        availableBytes = av_fifo_size(decoder->fifo);
+        if (availableBytes <= 0) {
+            break;
+        }
+
+        canReadLen = MIN(availableBytes, len);
+        av_fifo_generic_read(decoder->fifo, data, canReadLen, NULL);
+        ret = canReadLen;
+    } while (0);
+    //simpleLog("readFromFifo ret %d, left %d.", ret, av_fifo_size(decoder->fifo));
+    return ret;
+}
+
+int readCallback(void *opaque, uint8_t *data, int len) {
+    //simpleLog("readCallback %d.", len);
+    int32_t ret         = -1;
+    do {
+        if (decoder == NULL) {
+            break;
+        }
+
+        if (data == NULL || len <= 0) {
+            break;
+        }		
+
+        ret = decoder->isStream ? readFromFifo(data, len) : readFromFile(data, len);
+    } while (0);
+    //simpleLog("readCallback ret %d.", ret);
+    return ret;
+}
+
+int64_t seekCallback(void *opaque, int64_t offset, int whence) {
+    int64_t ret         = -1;
+    int64_t pos         = -1;
+    int64_t req_pos     = -1;
+    //simpleLog("seekCallback %lld %d.", offset, whence);
+    do {
+        if (decoder == NULL || decoder->isStream || decoder->fp == NULL) {
+            break;
+        }
+
+        if (whence == AVSEEK_SIZE) {
+            ret = decoder->fileSize;
+            break;
+        }
+
+        if (whence != SEEK_END && whence != SEEK_SET && whence != SEEK_CUR) {
+            break;
+        }
+
+        ret = fseek(decoder->fp, (long)offset, whence);
+        if (ret == -1) {
+            break;
+        }
+
+        pos = (int64_t)ftell(decoder->fp);
+        if (pos < decoder->lastRequestOffset || pos > decoder->fileWritePos) {
+            decoder->lastRequestOffset  = pos;
+            decoder->fileReadPos        = pos;
+            decoder->fileWritePos       = pos;
+            req_pos                     = pos;
+            ret                         = -1;  // Forcing not to call read at once.
+            decoder->requestCallback(pos, getAailableDataSize());
+            simpleLog("Will request %lld and return %lld.", pos, ret);
+            break;
+        }
+
+        decoder->fileReadPos = pos;
+        ret = pos;
+    } while (0);
+    //simpleLog("seekCallback return %lld.", ret);
+
+    if (decoder != NULL && decoder->requestCallback != NULL) {
+        decoder->requestCallback(req_pos, getAailableDataSize());
+    }
+    return ret;
+}
+
+int writeToFile(unsigned char *buff, int size) {
+    int ret = 0;
+    int64_t leftBytes = 0;
+    int canWriteBytes = 0;
+    do {
+        if (decoder->fp == NULL) {
+            ret = -1;
+            break;
+        }
+
+        leftBytes = decoder->fileSize - decoder->fileWritePos;
+        if (leftBytes <= 0) {
+            break;
+        }
+
+        canWriteBytes = MIN(leftBytes, size);
+        fseek(decoder->fp, decoder->fileWritePos, SEEK_SET);
+        fwrite(buff, canWriteBytes, 1, decoder->fp);
+        decoder->fileWritePos += canWriteBytes;
+        ret = canWriteBytes;
+    } while (0);
+    return ret;
+}
+
+int writeToFifo(unsigned char *buff, int size) {
+    int ret = 0;
+    do {
+        if (decoder->fifo == NULL) {
+            ret = -1;
+            break;
+        }
+
+        int64_t leftSpace = av_fifo_space(decoder->fifo);
+        if (leftSpace < size) {
+            int growSize = 0;
+            do {
+                leftSpace += decoder->fifoSize;
+                growSize += decoder->fifoSize;
+                decoder->fifoSize += decoder->fifoSize;
+            } while (leftSpace < size);
+            av_fifo_grow(decoder->fifo, growSize);
+
+            simpleLog("Fifo size growed to %d.", decoder->fifoSize);
+            if (decoder->fifoSize >= kMaxFifoSize) {
+                simpleLog("[Warn] Fifo size larger than %d.", kMaxFifoSize);
+            }
+        }
+
+        //simpleLog("Wrote %d bytes to fifo, total %d.", size, av_fifo_size(decoder->fifo));
+        ret = av_fifo_generic_write(decoder->fifo, buff, size, NULL);
+    } while (0);
+    return ret;
+}
+
+int getAailableDataSize() {
+    int ret = 0;
+    do {
+        if (decoder == NULL) {
+            break;
+        }
+
+        if (decoder->isStream) {
+            ret = decoder->fifo == NULL ? 0 : av_fifo_size(decoder->fifo);
+        } else {
+            ret = decoder->fileWritePos - decoder->fileReadPos;
+        }
+    } while (0);
+    return ret;
+}
+
+//////////////////////////////////Export methods////////////////////////////////////////
+/* Allocates the global WebDecoder singleton and selects the input mode.
+ * fileSize >= 0 : file mode — a temp file "tmp-<tick>.mp4" buffers the input.
+ * fileSize <  0 : stream mode — a growable AVFifo buffers the input.
+ * logLv sets the global log level (kLogLevel_All also hooks ffmpeg logging
+ * later, in openDecoder). Idempotent: a second call is a no-op.
+ * Returns kErrorCode_Success or kErrorCode_Open_File_Error.
+ * NOTE(review): av_mallocz and av_fifo_alloc results are not NULL-checked. */
+ErrorCode initDecoder(int fileSize, int logLv) {
+    ErrorCode ret = kErrorCode_Success;
+    do {
+        //Log level.
+        logLevel = logLv;
+
+        if (decoder != NULL) {
+            break;
+        }
+
+        decoder = (WebDecoder *)av_mallocz(sizeof(WebDecoder));
+        if (fileSize >= 0) {
+            decoder->fileSize = fileSize;
+            sprintf(decoder->fileName, "tmp-%lu.mp4", getTickCount());
+            decoder->fp = fopen(decoder->fileName, "wb+");
+            if (decoder->fp == NULL) {
+                simpleLog("Open file %s failed, err: %d.", decoder->fileName, errno);
+                ret = kErrorCode_Open_File_Error;
+                av_free(decoder);
+                decoder = NULL;
+            }
+        } else {
+            decoder->isStream = 1;
+            decoder->fifoSize = kDefaultFifoSize;
+            decoder->fifo = av_fifo_alloc(decoder->fifoSize);
+        }
+    } while (0);
+    simpleLog("Decoder initialized %d.", ret);
+    return ret;
+}
+
+/* Tears down the global decoder: closes and removes the temp file (file
+ * mode), frees the fifo (stream mode), frees the WebDecoder struct, and
+ * detaches the ffmpeg log callback. Safe to call when already uninitialized.
+ * Counterpart of initDecoder; codec/format contexts are released separately
+ * by closeDecoder. Always returns kErrorCode_Success. */
+ErrorCode uninitDecoder() {
+    if (decoder != NULL) {
+        if (decoder->fp != NULL) {
+            fclose(decoder->fp);
+            decoder->fp = NULL;
+            remove(decoder->fileName);
+        }
+
+        if (decoder->fifo != NULL) {
+             av_fifo_freep(&decoder->fifo);
+        }
+
+        av_freep(&decoder);
+    }
+
+    av_log_set_callback(NULL);
+
+    simpleLog("Decoder uninitialized.");
+    return kErrorCode_Success;
+}
+
+/* Opens the demuxer over a custom AVIO context (readCallback/seekCallback pull
+ * from the fifo/temp file filled by sendData), opens video + audio codec
+ * contexts, allocates the YUV output buffer, and reports media parameters to
+ * the caller through paramArray:
+ *   [0] duration ms, [1] pix_fmt, [2] width, [3] height,
+ *   [4] sample_fmt (packed equivalent if planar), [5] channels, [6] sample rate.
+ * The three long "callback" arguments are raw function pointers supplied by
+ * the JS side. Returns kErrorCode_Success or kErrorCode_FFmpeg_Error.
+ * NOTE(review): `decoder` is dereferenced without a NULL check — initDecoder
+ * must have succeeded first.
+ * NOTE(review): on failure only av_freep(&decoder) runs; the format context,
+ * IO context and any opened codec contexts leak. */
+ErrorCode openDecoder(int *paramArray, int paramCount, long videoCallback, long audioCallback, long requestCallback) {
+    ErrorCode ret = kErrorCode_Success;
+    int r = 0;
+    int i = 0;
+    int params[7] = { 0 };
+    do {
+        simpleLog("打开编码器.");
+
+        av_register_all();
+        avcodec_register_all();
+
+        if (logLevel == kLogLevel_All) {
+            av_log_set_callback(ffmpegLogCallback);
+        }
+        
+        decoder->avformatContext = avformat_alloc_context();
+        decoder->customIoBuffer = (unsigned char*)av_mallocz(kCustomIoBufferSize);
+
+        // write_flag=0 (read-only), opaque=NULL, no write callback.
+        AVIOContext* ioContext = avio_alloc_context(
+            decoder->customIoBuffer,
+            kCustomIoBufferSize,
+            0,
+            NULL,
+            readCallback,
+            NULL,
+            seekCallback);
+        if (ioContext == NULL) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("avio_alloc_context failed.");
+            break;
+        }
+
+        decoder->avformatContext->pb = ioContext;
+        decoder->avformatContext->flags = AVFMT_FLAG_CUSTOM_IO;
+		simpleLog("avformat_open_input.");
+
+        r = avformat_open_input(&decoder->avformatContext, NULL, NULL, NULL);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            char err_info[32] = { 0 };
+            // NOTE(review): av_strerror should receive the ffmpeg code `r`,
+            // not our enum `ret` — the logged message is wrong as written.
+            av_strerror(ret, err_info, 32);
+            simpleLog("avformat_open_input failed %d %s.", ret, err_info);
+            break;
+        }
+        
+        simpleLog("avformat_find_stream_info");
+
+        r = avformat_find_stream_info(decoder->avformatContext, NULL);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("av_find_stream_info failed %d.", ret);
+            break;
+        }
+			
+			
+	
+        simpleLog("avformat_find_stream_info 成功.");
+
+        for (i = 0; i < decoder->avformatContext->nb_streams; i++) {
+            decoder->avformatContext->streams[i]->discard = AVDISCARD_DEFAULT;
+        }
+
+        r = openCodecContext(
+            decoder->avformatContext,
+            AVMEDIA_TYPE_VIDEO,
+            &decoder->videoStreamIdx,
+            &decoder->videoCodecContext);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("Open video codec context failed %d.", ret);
+            break;
+        }
+
+        simpleLog("Open video codec context success, video stream index %d %x.",
+            decoder->videoStreamIdx, (unsigned int)decoder->videoCodecContext);
+
+        simpleLog("Video stream index:%d pix_fmt:%d resolution:%d*%d.",
+            decoder->videoStreamIdx,
+            decoder->videoCodecContext->pix_fmt,
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height);
+
+        r = openCodecContext(
+            decoder->avformatContext,
+            AVMEDIA_TYPE_AUDIO,
+            &decoder->audioStreamIdx,
+            &decoder->audioCodecContext);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("Open audio codec context failed %d.", ret);
+            break;
+        }
+
+        simpleLog("Open audio codec context success, audio stream index %d %x.",
+            decoder->audioStreamIdx, (unsigned int)decoder->audioCodecContext);
+
+        simpleLog("Audio stream index:%d sample_fmt:%d channel:%d, sample rate:%d.",
+            decoder->audioStreamIdx,
+            decoder->audioCodecContext->sample_fmt,
+            decoder->audioCodecContext->channels,
+            decoder->audioCodecContext->sample_rate);
+
+        // Rewind to the start so decoding begins at the first frame.
+        av_seek_frame(decoder->avformatContext, -1, 0, AVSEEK_FLAG_BACKWARD);
+
+        /* For RGB Renderer(2D WebGL).
+        decoder->swsCtx = sws_getContext(
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height,
+            decoder->videoCodecContext->pix_fmt, 
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height,
+            AV_PIX_FMT_RGB32,
+            SWS_BILINEAR, 
+            0, 
+            0, 
+            0);
+        if (decoder->swsCtx == NULL) {
+            simpleLog("sws_getContext failed.");
+            ret = kErrorCode_FFmpeg_Error;
+            break;
+        }
+        */
+        
+        decoder->videoSize = avpicture_get_size(
+            decoder->videoCodecContext->pix_fmt,
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height);
+
+        // Triple-buffered YUV output.
+        decoder->videoBufferSize = 3 * decoder->videoSize;
+        decoder->yuvBuffer = (unsigned char *)av_mallocz(decoder->videoBufferSize);
+        decoder->avFrame = av_frame_alloc();
+        
+        // +5000us pads the duration before the ms conversion (rounds up).
+        params[0] = 1000 * (decoder->avformatContext->duration + 5000) / AV_TIME_BASE;
+        params[1] = decoder->videoCodecContext->pix_fmt;
+        params[2] = decoder->videoCodecContext->width;
+        params[3] = decoder->videoCodecContext->height;
+        params[4] = decoder->audioCodecContext->sample_fmt;
+        params[5] = decoder->audioCodecContext->channels;
+        params[6] = decoder->audioCodecContext->sample_rate;
+
+        // Report the packed variant of planar sample formats to the JS side.
+        enum AVSampleFormat sampleFmt = decoder->audioCodecContext->sample_fmt;
+        if (av_sample_fmt_is_planar(sampleFmt)) {
+            const char *packed = av_get_sample_fmt_name(sampleFmt); // NOTE(review): unused.
+            params[4] = av_get_packed_sample_fmt(sampleFmt);
+        }
+
+        // NOTE(review): paramCount is trusted; values beyond 7 read past params[].
+        if (paramArray != NULL && paramCount > 0) {
+            for (int i = 0; i < paramCount; ++i) {
+                paramArray[i] = params[i];
+            }
+        }
+
+        decoder->videoCallback = (VideoCallback)videoCallback;
+        decoder->audioCallback = (AudioCallback)audioCallback;
+        decoder->requestCallback = (RequestCallback)requestCallback;
+
+        simpleLog("Decoder opened, duration %ds, picture size %d.", params[0], decoder->videoSize);
+    } while (0);
+
+    if (ret != kErrorCode_Success && decoder != NULL) {
+        av_freep(&decoder);
+    }
+    return ret;
+}
+
+/* Releases everything openDecoder created: both codec contexts, the custom
+ * IO context and its buffer, the demuxer, and the YUV/PCM/frame buffers.
+ * No-op when the decoder or its format context is absent.
+ * Always returns kErrorCode_Success.
+ * NOTE(review): the AVFrame is released with av_freep, not av_frame_free,
+ * so any internally referenced frame buffers would leak — confirm. */
+ErrorCode closeDecoder() {
+    ErrorCode ret = kErrorCode_Success;
+    do {
+        if (decoder == NULL || decoder->avformatContext == NULL) {
+            break;
+        }
+
+        if (decoder->videoCodecContext != NULL) {
+            closeCodecContext(decoder->avformatContext, decoder->videoCodecContext, decoder->videoStreamIdx);
+            decoder->videoCodecContext = NULL;
+            simpleLog("Video codec context closed.");
+        }
+
+        if (decoder->audioCodecContext != NULL) {
+            closeCodecContext(decoder->avformatContext, decoder->audioCodecContext, decoder->audioStreamIdx);
+            decoder->audioCodecContext = NULL;
+            simpleLog("Audio codec context closed.");
+        }
+
+        // The IO buffer may have been reallocated by ffmpeg; free via pb->buffer
+        // and just clear our alias to it.
+        AVIOContext *pb = decoder->avformatContext->pb;
+        if (pb != NULL) {
+            if (pb->buffer != NULL) {
+                av_freep(&pb->buffer);
+                decoder->customIoBuffer = NULL;
+            }
+            av_freep(&decoder->avformatContext->pb);
+            simpleLog("IO context released.");
+        }
+
+        avformat_close_input(&decoder->avformatContext);
+        decoder->avformatContext = NULL;
+        simpleLog("Input closed.");
+
+        if (decoder->yuvBuffer != NULL) {
+            av_freep(&decoder->yuvBuffer);
+        }
+
+        if (decoder->pcmBuffer != NULL) {
+            av_freep(&decoder->pcmBuffer);
+        }
+        
+        if (decoder->avFrame != NULL) {
+            av_freep(&decoder->avFrame);
+        }
+        simpleLog("All buffer released.");
+    } while (0);
+    return ret;
+}
+
+/* Entry point for the JS side to push raw media bytes into the decoder.
+ * Routes to the fifo (stream mode) or the temp file (file mode).
+ * Returns the underlying write result, -1 when uninitialized, -2 on
+ * NULL/empty input.
+ * NOTE(review): leftBytes and canWriteBytes are declared but never used. */
+int sendData(unsigned char *buff, int size) {
+    int ret = 0;
+    int64_t leftBytes = 0;
+    int canWriteBytes = 0;
+    do {
+        if (decoder == NULL) {
+            ret = -1;
+            break;
+        }
+
+        if (buff == NULL || size == 0) {
+            ret = -2;
+            break;
+        }
+
+        ret = decoder->isStream ? writeToFifo(buff, size) : writeToFile(buff, size);
+    } while (0);
+    return ret;
+}
+
+/* Reads one packet from the demuxer and decodes it fully, looping decodePacket
+ * over the packet until every byte is consumed (handles partial decodes).
+ * Returns kErrorCode_Invalid_State when uninitialized or when no buffered
+ * input is available, kErrorCode_Eof at end of stream, otherwise the result
+ * of the last decodePacket call. A read error (r < 0) other than EOF is
+ * silently treated as success for this call.
+ * The packet is always unreffed on exit, including the partially-advanced
+ * data pointer case (unref releases via the packet's buf reference). */
+ErrorCode decodeOnePacket() {
+    ErrorCode ret	= kErrorCode_Success;
+    int decodedLen	= 0;
+    int r			= 0;
+
+    AVPacket packet;
+    av_init_packet(&packet);
+    do {
+        if (decoder == NULL) {
+            ret = kErrorCode_Invalid_State;
+            break;
+        }
+
+        if (getAailableDataSize() <= 0) {
+            ret = kErrorCode_Invalid_State;
+            break;
+        }
+
+        packet.data = NULL;
+        packet.size = 0;
+
+        r = av_read_frame(decoder->avformatContext, &packet);
+        if (r == AVERROR_EOF) {
+            ret = kErrorCode_Eof;
+            break;
+        }
+
+        if (r < 0 || packet.size == 0) {
+            break;
+        }
+
+        do {
+            ret = decodePacket(&packet, &decodedLen);
+            if (ret != kErrorCode_Success) {
+                break;
+            }
+
+            if (decodedLen <= 0) {
+                break;
+            }
+
+            packet.data += decodedLen;
+            packet.size -= decodedLen;
+        } while (packet.size > 0);
+    } while (0);
+    av_packet_unref(&packet);
+    return ret;
+}
+
+/* Seeks the demuxer to `ms` milliseconds (converted to AV_TIME_BASE us),
+ * flushes both codec contexts, and records the new time origin in
+ * beginTimeOffset. accurateSeek is stored for decodePacket to drop frames
+ * earlier than the target. Returns kErrorCode_FFmpeg_Error on seek failure.
+ * NOTE(review): no NULL check on `decoder` — crashes if called before open.
+ * NOTE(review): only ret == -1 is treated as failure; other negative ffmpeg
+ * codes fall through to the success path — confirm intent.
+ * NOTE(review): the "trigger seek callback" packet is never av_packet_unref'd,
+ * leaking one packet per seek. */
+ErrorCode seekTo(int ms, int accurateSeek) {
+    int ret = 0;
+    int64_t pts = (int64_t)ms * 1000;
+    decoder->accurateSeek = accurateSeek;
+    ret = avformat_seek_file(decoder->avformatContext,
+                                 -1,
+                                 INT64_MIN,
+                                 pts,
+                                 pts,
+                                 AVSEEK_FLAG_BACKWARD);
+    simpleLog("Native seek to %d return %d %d.", ms, ret, decoder->accurateSeek);
+    if (ret == -1) {
+        return kErrorCode_FFmpeg_Error;
+    } else {
+        avcodec_flush_buffers(decoder->videoCodecContext);
+        avcodec_flush_buffers(decoder->audioCodecContext);
+
+        // Trigger seek callback
+        AVPacket packet;
+        av_init_packet(&packet);
+        av_read_frame(decoder->avformatContext, &packet);
+
+        decoder->beginTimeOffset = (double)ms / 1000;
+        return kErrorCode_Success;
+    }
+}
+
+/* Emscripten entry point; intentionally empty — all work happens through the
+ * exported functions above, driven from JavaScript. */
+int main() {
+    //simpleLog("Native loaded.");
+    return 0;
+}
+
+#ifdef __cplusplus
+}
+#endif

+ 126 - 0
screenAndroid/pcm-player.js

@@ -0,0 +1,126 @@
+/**
+ * Streaming PCM player backed by the Web Audio API.
+ * @param {Object} option - overrides for the defaults set in init():
+ *   encoding, channels, sampleRate, flushingTime (ms).
+ */
+function PCMPlayer(option) {
+    this.init(option);
+}
+
+/**
+ * Merges caller options over the defaults, prepares the sample buffer,
+ * starts the periodic flush timer, and creates the AudioContext.
+ * flush is bound once here so setInterval keeps the correct `this`.
+ */
+PCMPlayer.prototype.init = function(option) {
+    var defaults = {
+        encoding: '16bitInt',
+        channels: 1,
+        sampleRate: 8000,
+        flushingTime: 1000
+    };
+    this.option = Object.assign({}, defaults, option);
+    this.samples = new Float32Array();
+    this.flush = this.flush.bind(this);
+    this.interval = setInterval(this.flush, this.option.flushingTime);
+    this.maxValue = this.getMaxValue();
+    this.typedArray = this.getTypedArray();
+    this.createContext();
+};
+
+/**
+ * Full-scale magnitude for the configured sample encoding, used to
+ * normalize integer PCM into [-1, 1). Unknown encodings fall back to
+ * the 16-bit value.
+ */
+PCMPlayer.prototype.getMaxValue = function () {
+    var encodings = {
+        '8bitInt': 128,
+        '16bitInt': 32768,
+        '32bitInt': 2147483648,
+        '32bitFloat': 1
+    }
+
+    return encodings[this.option.encoding] ? encodings[this.option.encoding] : encodings['16bitInt'];
+};
+
+/**
+ * TypedArray constructor matching the configured encoding, used to
+ * reinterpret incoming byte buffers. Unknown encodings fall back to
+ * Int16Array.
+ */
+PCMPlayer.prototype.getTypedArray = function () {
+    var typedArrays = {
+        '8bitInt': Int8Array,
+        '16bitInt': Int16Array,
+        '32bitInt': Int32Array,
+        '32bitFloat': Float32Array
+    }
+
+    return typedArrays[this.option.encoding] ? typedArrays[this.option.encoding] : typedArrays['16bitInt'];
+};
+
+/**
+ * Creates the AudioContext (webkit fallback for older Safari) and a gain
+ * node for volume control; startTime tracks where the next buffer will be
+ * scheduled on the context timeline.
+ */
+PCMPlayer.prototype.createContext = function() {
+    this.audioCtx = new (window.AudioContext || window.webkitAudioContext)();
+    this.gainNode = this.audioCtx.createGain();
+    this.gainNode.gain.value = 1;
+    this.gainNode.connect(this.audioCtx.destination);
+    this.startTime = this.audioCtx.currentTime;
+};
+
+/**
+ * Duck-types a TypedArray view: must expose byteLength and wrap an
+ * ArrayBuffer. Used by feed() to reject unusable input.
+ */
+PCMPlayer.prototype.isTypedArray = function(data) {
+    return (data.byteLength && data.buffer && data.buffer.constructor == ArrayBuffer);
+};
+
+/**
+ * Appends a chunk of PCM (in the configured encoding) to the pending
+ * sample queue; the interval-driven flush() drains it. Silently ignores
+ * non-TypedArray input.
+ * NOTE(review): each call reallocates and copies the whole queue (O(n)
+ * per feed) — acceptable for small buffers, a ring buffer would scale better.
+ */
+PCMPlayer.prototype.feed = function(data) {
+    if (!this.isTypedArray(data)) return;
+    data = this.getFormatedValue(data);
+    var tmp = new Float32Array(this.samples.length + data.length);
+    tmp.set(this.samples, 0);
+    tmp.set(data, this.samples.length);
+    this.samples = tmp;
+};
+
+/**
+ * Reinterprets the incoming bytes via the encoding's TypedArray and
+ * normalizes every sample to [-1, 1) by dividing by the encoding's
+ * full-scale value (no-op scale for 32bitFloat, maxValue = 1).
+ * (The inner `var data` deliberately shadows the parameter.)
+ */
+PCMPlayer.prototype.getFormatedValue = function(data) {
+    var data = new this.typedArray(data.buffer),
+        float32 = new Float32Array(data.length),
+        i;
+
+    for (i = 0; i < data.length; i++) {
+        float32[i] = data[i] / this.maxValue;
+    }
+    return float32;
+};
+
+/** Sets output volume via the gain node (0 = mute, 1 = unity). */
+PCMPlayer.prototype.volume = function(volume) {
+    this.gainNode.gain.value = volume;
+};
+
+/**
+ * Stops the flush timer, drops queued samples, and closes the
+ * AudioContext. The instance is unusable afterwards.
+ */
+PCMPlayer.prototype.destroy = function() {
+    if (this.interval) {
+        clearInterval(this.interval);
+    }
+    this.samples = null;
+    this.audioCtx.close();
+    this.audioCtx = null;
+};
+
+/**
+ * Drains the queued interleaved samples into an AudioBuffer and schedules
+ * it seamlessly after the previously scheduled buffer (startTime), then
+ * resets the queue. De-interleaves per channel (offset walks in strides of
+ * `channels`), and applies a 50-sample linear fade-in/fade-out per channel
+ * to avoid clicks at buffer boundaries. Runs on the init() interval timer.
+ */
+PCMPlayer.prototype.flush = function() {
+    if (!this.samples.length) return;
+    var bufferSource = this.audioCtx.createBufferSource(),
+        length = this.samples.length / this.option.channels,
+        audioBuffer = this.audioCtx.createBuffer(this.option.channels, length, this.option.sampleRate),
+        audioData,
+        channel,
+        offset,
+        i,
+        decrement;
+
+    for (channel = 0; channel < this.option.channels; channel++) {
+        audioData = audioBuffer.getChannelData(channel);
+        offset = channel;
+        decrement = 50;
+        for (i = 0; i < length; i++) {
+            audioData[i] = this.samples[offset];
+            /* fadein */
+            if (i < 50) {
+                audioData[i] =  (audioData[i] * i) / 50;
+            }
+            /* fadeout*/
+            if (i >= (length - 51)) {
+                audioData[i] =  (audioData[i] * decrement--) / 50;
+            }
+            offset += this.option.channels;
+        }
+    }
+    
+    // If playback has caught up with (or passed) the schedule, restart "now".
+    if (this.startTime < this.audioCtx.currentTime) {
+        this.startTime = this.audioCtx.currentTime;
+    }
+    //console.log('start vs current '+this.startTime+' vs '+this.audioCtx.currentTime+' duration: '+audioBuffer.duration);
+    bufferSource.buffer = audioBuffer;
+    bufferSource.connect(this.gainNode);
+    bufferSource.start(this.startTime);
+    this.startTime += audioBuffer.duration;
+    this.samples = new Float32Array();
+};

+ 13 - 0
screenIos/Makefile

@@ -0,0 +1,13 @@
+# Native (non-wasm) test build of the AAC decoder against libfaad.
+# NOTE(review): the include path below is machine-specific — adjust per host.
+lib = -lfaad -lm 
+include = -I/home/MyDocumet/faadjs
+path = -L/usr/lib
+CC = gcc
+
+# Link the decoder object into the aacTest binary.
+all: aac.o
+	$(CC) aac.o $(include) $(lib) $(path) -o aacTest
+
+# Compile step for aac.c.
+aac.o:aac.c
+	echo "生成aac.o"
+	$(CC) aac.c -c $(include) -o aac.o
+clean:
+	rm *.o aacTest

+ 19 - 12
screenIos/WXdraw.js

@@ -14,7 +14,12 @@ var winHeight = window.screen.height - window.innerHeight
 var vowidth = window.screen.width
 var topwinHeightDraw = window.screen.height - window.innerHeight + 30; //计算title top 头部
 var numse = window.screen.height //-winHeight
-
+// function getUrlParam(name) {
+// var reg = new RegExp("(^|&)" + name + "=([^&]*)(&|$)"); //构造一个含有目标参数的正则表达式对象
+// var r = window.location.search.substr(1).match(reg); //匹配目标参数
+// if (r != null) return unescape(r[2]); return null; //返回参数值
+// }
+// var sn = getUrlParam('sn');
 //计算title top 头部
 if (numse <= 70) {
 
@@ -115,7 +120,7 @@ wsss.onopen = function() {
 		"event": "bitRate"
 	}
 	// wsss.send(JSON.stringify(bitRate));
-	wsss.send(ExexuteMove(JSON.stringify(bitRate))) 
+	wsss.send(ExexuteMove(JSON.stringify(bitRate)),data.sn) 
 };
 wsss.onmessage = function(event) {
 	// console.log("onMessage==============", event);
@@ -165,7 +170,7 @@ $(".botmat1img").on("click", function() {
 		// 	"event": "keyCode"
 		// }
 		// console.log("打印主页json", JSON.stringify(bitRate))
-		wsss.send(ExexuteKeyBoard(3));
+		wsss.send(ExexuteKeyBoard(3),data.sn);
 		// console.log("打印主页json", wsss)
 	} else if (codes == "return") {
 		// var bitRate = {
@@ -175,7 +180,7 @@ $(".botmat1img").on("click", function() {
 		// 	"event": "keyCode"
 		// }
 		// wsss.send(JSON.stringify(bitRate));
-		wsss.send(ExexuteKeyBoard(4));
+		wsss.send(ExexuteKeyBoard(4),data.sn);
 	} else if (codes == "gengduo") {
 		// var bitRate = {
 		// 	"data": {
@@ -184,7 +189,7 @@ $(".botmat1img").on("click", function() {
 		// 	"event": "keyCode"
 		// }
 		// wsss.send(JSON.stringify(bitRate));
-		wsss.send(ExexuteKeyBoard(187));
+		wsss.send(ExexuteKeyBoard(187),data.sn);
 	}
 
 })
@@ -198,7 +203,8 @@ $(".PictureQuality").on("click", function() {
 		},
 		"event": "bitRate"
 	}
-	wsss.send(JSON.stringify(bitRate));
+	// wsss.send(JSON.stringify(bitRate));
+	wsss.send(ExexuteMove(JSON.stringify(bitRate)),data.sn) 
 	console.log(id)
 })
 
@@ -247,7 +253,7 @@ var draw_graph = function(graphType, obj) {
 				}
 			}
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 			console.log("鼠标按下>>>", ping)
 		} else {
 			let ping
@@ -267,7 +273,7 @@ var draw_graph = function(graphType, obj) {
 
 			}
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 			console.log("鼠标按下>>>", ping)
 		}
 
@@ -300,7 +306,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 
 		} else {
 			let ping
@@ -320,7 +326,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 		}
 
 		canDraw = false;
@@ -361,7 +367,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 		} else {
 			let ping
 			for (let i = 0; i < touchfor.length; i++) {
@@ -381,7 +387,7 @@ var draw_graph = function(graphType, obj) {
 			}
 
 			// wsss.send(JSON.stringify(ping));
-			wsss.send(ExexuteMove(JSON.stringify(ping))) 
+			wsss.send(ExexuteMove(JSON.stringify(ping)),data.sn) 
 		}
 
 
@@ -412,3 +418,4 @@ var draw_graph = function(graphType, obj) {
 function chooseImg(obj) {
 
 }
+

+ 52 - 8
screenIos/WXtrialInterface.html

@@ -62,7 +62,7 @@
 				<!-- <video id="video1" muted="muted" x5-video-orientation="landscape" playsinline="true" autoplay="true"
 				 webkit-playsinline="true" x5-playsinline="true" x5-video-player-type="h5" x5-video-player-fullscreen="false" style="pointer-events: none;"></video> -->
 				<div id="box">
-					<canvas id="playCanvas" width="450" height="800"></canvas> 
+					<canvas id="playCanvas" width="450" height="800"></canvas>
 				</div>
 			</div>
 			<div class="leftmains">
@@ -647,9 +647,48 @@
 				document.execCommand("copy");
 
 			}
+			var decodeCount = 1;
+			var isFinish = false;
+			var player = new PCMPlayer({
+				encoding: '16bitInt',
+				channels: 2,
+				sampleRate: 44100,
+				flushingTime: 22,
+				debug: false
+			});
 
+			Module = {};
+			Module.onRuntimeInitialized = function() {
+				console.log("Wasm 加载成功!")
+				isFinish = true;
+			}
 
+			/**
+			 * Decodes one AAC (ADTS) frame through the wasm module and feeds the
+			 * resulting PCM to the PCMPlayer. Copies the input into wasm heap
+			 * memory, calls the exported _feedData, copies the PCM back out, and
+			 * frees both wasm buffers every call.
+			 * NOTE(review): retPtr is a fixed 20KB output buffer and inputPtr is
+			 * over-allocated 4x the byte count — confirm sizing against _feedData.
+			 */
+			function decodeAAC(data) {
+				var retPtr = Module._malloc(4 * 5 * 1024); //接收的数据
+				var inputPtr = Module._malloc(4 * data.length); //输入数据
+
+				for (i = 0; i < data.length; i++) {
+					Module.HEAPU8[(inputPtr) + i] = data[i]; //转换为堆数据
+				}
+
+				var pcmLen = Module._feedData(retPtr, inputPtr, data.length);
+
+				if (pcmLen >= 0) {
+					//console.log("%d帧 aac 解码成功, %d", decodeCount, pcmLen);
+					var pcmData = new Uint8Array(pcmLen);
+					for (i = 0; i < pcmLen; i++) {
+						pcmData[i] = Module.HEAPU8[(retPtr) + i]
+					}
+
+					player.feed(pcmData);
+				} else {
+					console.log("%d帧 aac 解码失败, %d", decodeCount, pcmLen);
+				}
+
+				decodeCount++;
+				Module._free(inputPtr);
+				Module._free(retPtr);
+			}
 
 			var decodeWoker = new Worker('decoder.js');
 			var myVideo = document.getElementById("playCanvas");
@@ -689,9 +728,14 @@
 
 
 			webSocketWorker.onmessage = function(event) {
-				// console.log("event.data", event.data)
 				decodeWoker.postMessage(event.data);
-				// decodeWoker.postMessage(wsSendfun())
+				// var input = new Uint8Array(decodeWoker.postMessage(event.data));
+				var input = event.data
+				// decodeAAC(event.data)
+				// console.log("音频", event.data);
+				if (input[0] == 0xff) {
+					decodeAAC(input);
+				} else {}
 			}
 
 			document.addEventListener("visibilitychange", () => {
@@ -747,7 +791,7 @@
 				if (event.button == 0) {
 					var posX = event.offsetX * 720 * 1.0 / myVideo.clientWidth;
 					var posY = event.offsetY * 1280 * 1.0 / myVideo.clientHeight;
-					var buffer = ExexuteMouseDown(posX.toString(), posY.toString());
+					var buffer = ExexuteMouseDown(posX.toString(), posY.toString(),data.sn);
 					console.log('55555', buffer)
 					webSocketWorker.postMessage(buffer);
 					isDrag = true;
@@ -759,7 +803,7 @@
 				if (isDrag && event.button == 0) {
 					var posX = event.offsetX * 720 * 1.0 / myVideo.clientWidth;
 					var posY = event.offsetY * 1280 * 1.0 / myVideo.clientHeight;
-					var buffer = ExexuteMouseMove(posX.toString(), posY.toString());
+					var buffer = ExexuteMouseMove(posX.toString(), posY.toString(),data.sn);
 					// ws.send(buffer);
 					console.log('55555', buffer)
 					webSocketWorker.postMessage(buffer);
@@ -773,7 +817,7 @@
 				isDrag = false;
 				var posX = event.offsetX * 720 * 1.0 / myVideo.clientWidth;
 				var posY = event.offsetY * 1280 * 1.0 / myVideo.clientHeight;
-				var buffer = ExexuteMouseUp(posX.toString(), posY.toString());
+				var buffer = ExexuteMouseUp(posX.toString(), posY.toString(),data.sn);
 				// ws.send(buffer);
 				webSocketWorker.postMessage(buffer);
 
@@ -781,10 +825,10 @@
 
 			myVideo.onkeydown = function(event) {
 				console.log('5555555555')
-				ExexuteKeyDown(e.keyCode);
+				ExexuteKeyDown(e.keyCode,data.sn);
 			}
 		</script>
-
+		<script type="text/javascript" src="aac.js"></script>
 	</body>
 
 </html>

+ 130 - 0
screenIos/aac.c

@@ -0,0 +1,130 @@
+#include <memory.h>
+#include <stdlib.h>
+#include "faad.h"
+#include <stdbool.h>
+#include <string.h>
+#include <emscripten.h>
+#include <stdio.h>
+#include <sys/time.h>
+#include <sys/timeb.h>
+#include <unistd.h>
+
+bool hasInit = false;
+
+NeAACDecHandle decoder = 0;
+NeAACDecFrameInfo frame_info;
+
+/* NOTE(review): dead code — copies the buffer into a local and discards it;
+ * nothing is printed despite the name. The 1MB stack array overflows the
+ * stack on many targets, `size` is unbounded (write past `data` when
+ * size >= 1MB), and data[i + 1] indexes one past the copied range.
+ * Candidate for removal. */
+void PrintArry(unsigned char *buffer, unsigned int size)
+{
+	int i;
+	char data[1024*1024];
+	
+	for(i = 0;i < size;i++)
+	{
+		data[i] = buffer[i];
+	}
+	
+	data[i + 1] = '\0';
+}
+
+/* Lazily opens the libfaad decoder, initializing it from the first buffer
+ * (expected to start with an ADTS header so faad can derive sampleRate and
+ * channels). Sets hasInit so feedData only runs this once. Always returns 0.
+ * NOTE(review): the NeAACDecInit return value (negative on bad init data)
+ * is ignored — confirm callers tolerate decoding after a failed init. */
+int init_decoder(unsigned char* inBuffer, size_t size)
+{  
+    unsigned char channels;
+    unsigned long sampleRate;
+    
+    memset(&frame_info, 0, sizeof(frame_info));
+    decoder = NeAACDecOpen();
+    NeAACDecInit(decoder, inBuffer, size, &sampleRate, &channels);
+    //printf("init_decoder初始化完毕\n");
+    hasInit = true;
+    return 0;
+}
+
+/* Decodes one AAC frame from `buffer` into `out_data` (exported to wasm as
+ * _feedData). Initializes the decoder from the first frame if needed.
+ * Returns the faad error code (> 0) on failure, 0 when nothing was produced,
+ * otherwise the number of bytes copied into out_data.
+ * NOTE(review): faad's frame_info.samples already counts all channels, and
+ * the copy treats `ret` as a byte count over 16-bit output — samples *
+ * channels equals the byte count only for stereo 16-bit; confirm for other
+ * layouts. out_data must be large enough (JS side allocates 20KB). */
+int feedData(unsigned char* out_data, unsigned char* buffer, unsigned int size)
+{
+	int ret = 0;
+	
+    if (!hasInit)
+    {
+        init_decoder(buffer, size);
+    }
+
+    unsigned char *out_buffer = (unsigned char*)NeAACDecDecode(decoder, &frame_info, buffer, size);
+	//printf("frame_info.error %d\n",frame_info.error);
+
+    if (frame_info.error > 0)
+    {		
+        return frame_info.error;
+    }
+    else if(out_buffer && frame_info.samples > 0)//解码成功
+    {
+		ret = frame_info.samples * frame_info.channels;
+		for(int i = 0;i < ret;i++)
+		{
+			 out_data[i] = out_buffer[i];
+		}
+    }
+
+    return ret;
+}
+
+/* Closes the libfaad decoder and clears hasInit so the next feedData call
+ * re-initializes from scratch. */
+void destroyDecoder()
+{
+	hasInit = false;
+    NeAACDecClose(decoder);
+}
+
+/*bool GetFrame(FILE *file, unsigned char *input, int *len, int *pos)
+{
+    int readByte;
+    int frameLen = 0;
+    unsigned char buffer[6];
+
+    while ((readByte = fread(buffer, 1, 6, file)) > 0)
+    {
+        if ((buffer[0] == 0xff) && ((buffer[1] & 0xf0) == 0xf0))
+        {         
+            frameLen = ((buffer[3] & 0x3) << 11) | ((buffer[4]) << 3) | ((buffer[5]) >> 5);
+            printf("帧长度 %d\n", frameLen);
+            *len = frameLen;
+            fseek(file, *pos, SEEK_SET);
+            fread(input, 1, frameLen, file);
+            *pos = *pos + frameLen;
+            return true;
+        }
+        else
+        {
+            printf("位置没找对\n");
+        }
+    }
+
+    return false;
+}
+
+int main(int argc, char* argv[])
+{
+    int len;
+    int pos = 0;
+    unsigned char buffer[4096] = {0};
+    unsigned char OutBuffer[10240];
+    unsigned char* pcmData = OutBuffer;
+    FILE* file = fopen("test.aac", "rb");
+	if(!file)
+	{
+		printf("找不到AAC文件\n");
+		return -1;
+	}
+	
+    outFile = fopen("shchu.pcm", "wb+");
+
+    while (GetFrame(file, buffer, &len, &pos))
+    {
+        feed_data(pcmData, buffer, len);     
+    }
+
+    fclose(file);
+	fclose(outFile);
+	destroy_decoder();
+	printf("解码完毕\n");
+    return 0;
+}*/

Файловите разлики са ограничени, защото са твърде много
+ 1 - 0
screenIos/aac.js


BIN
screenIos/aac.wasm


+ 4 - 0
screenIos/buildFaad.sh

@@ -0,0 +1,4 @@
+#cd /home/github/faad2-2_10_0
+. bootstrap
+emconfigure ./configure --prefix=/usr --enable-shared --without-xmms --without-drm --without-mpeg4ip
+emmake make

+ 24 - 0
screenIos/buildwasm.sh

@@ -0,0 +1,24 @@
+export TOTAL_MEMORY=10485760
+rm *.js *.wasm
+
+export EXPORTED_FUNCTIONS="[  	\
+	'_malloc' \
+	,'_free' \
+	,'_destroyDecoder' \
+	,'_feedData'   \
+]"
+
+export LIBRARY_FUNCTIONS="[\
+    'malloc', \
+    'free'	  \
+]"
+
+#
+emcc aac.c  \
+-O3 \
+-s WASM=1 \
+-I /usr/local -lfaad -lm -L/usr/local/lib  \
+-s TOTAL_MEMORY=${TOTAL_MEMORY} \
+-s DEFAULT_LIBRARY_FUNCS_TO_INCLUDE="${LIBRARY_FUNCTIONS}" \
+-s EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS}" \
+-o aac.js

+ 997 - 0
screenIos/decoder.c

@@ -0,0 +1,997 @@
+#include <stdio.h>
+#include <sys/time.h>
+#include <sys/timeb.h>
+#include <unistd.h>
+
+typedef void(*VideoCallback)(unsigned char *buff, int size, double timestamp);
+typedef void(*AudioCallback)(unsigned char *buff, int size, double timestamp);
+typedef void(*RequestCallback)(int offset, int available);
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "libavcodec/avcodec.h"
+#include "libavformat/avformat.h"
+#include "libavutil/fifo.h"
+//#include "libswscale/swscale.h"
+
+#define MIN(X, Y)  ((X) < (Y) ? (X) : (Y))
+
+const int kCustomIoBufferSize = 32 * 1024;
+const int kInitialPcmBufferSize = 128 * 1024;
+const int kDefaultFifoSize = 1 * 1024 * 1024;
+const int kMaxFifoSize = 16 * 1024 * 1024;
+
+typedef enum ErrorCode {
+    kErrorCode_Success = 0,
+    kErrorCode_Invalid_Param,
+    kErrorCode_Invalid_State,
+    kErrorCode_Invalid_Data,
+    kErrorCode_Invalid_Format,
+    kErrorCode_NULL_Pointer,
+    kErrorCode_Open_File_Error,
+    kErrorCode_Eof,
+    kErrorCode_FFmpeg_Error,
+    kErrorCode_Old_Frame
+} ErrorCode;
+
+typedef enum LogLevel {
+    kLogLevel_None, //Not logging.
+    kLogLevel_Core, //Only logging core module(without ffmpeg).
+    kLogLevel_All   //Logging all, with ffmpeg.
+} LogLevel;
+
+typedef struct WebDecoder {
+    AVFormatContext *avformatContext;
+    AVCodecContext *videoCodecContext;
+    AVCodecContext *audioCodecContext;
+    AVFrame *avFrame;
+    int videoStreamIdx;
+    int audioStreamIdx;
+    VideoCallback videoCallback;
+    AudioCallback audioCallback;
+    RequestCallback requestCallback;
+    unsigned char *yuvBuffer;
+    //unsigned char *rgbBuffer;
+    unsigned char *pcmBuffer;
+    int currentPcmBufferSize;
+    int videoBufferSize;
+    int videoSize;
+    //struct SwsContext* swsCtx;
+    unsigned char *customIoBuffer;
+    FILE *fp;
+    char fileName[64];
+    int64_t fileSize;
+    int64_t fileReadPos;
+    int64_t fileWritePos;
+    int64_t lastRequestOffset;
+    double beginTimeOffset;
+    int accurateSeek;
+    // For streaming.
+    int isStream;
+    AVFifoBuffer *fifo;
+    int fifoSize;
+} WebDecoder;
+
+WebDecoder *decoder = NULL;
+LogLevel logLevel = kLogLevel_None;
+
+int getAailableDataSize();
+
+unsigned long getTickCount() {
+    struct timespec ts;
+    clock_gettime(CLOCK_MONOTONIC, &ts);
+    return ts.tv_sec * (unsigned long)1000 + ts.tv_nsec / 1000000;
+}
+
+void simpleLog(const char* format, ...) {
+    if (logLevel == kLogLevel_None) {
+        return;
+    }
+
+    char szBuffer[1024] = { 0 };
+    char szTime[32]		= { 0 };
+    char *p				= NULL;
+    int prefixLength	= 0;
+    const char *tag		= "Core";
+    struct tm tmTime;
+    struct timeb tb;
+
+    ftime(&tb);
+    localtime_r(&tb.time, &tmTime);
+
+    if (1) {
+        int tmYear		= tmTime.tm_year + 1900;
+        int tmMon		= tmTime.tm_mon + 1;
+        int tmMday		= tmTime.tm_mday;
+        int tmHour		= tmTime.tm_hour;
+        int tmMin		= tmTime.tm_min;
+        int tmSec		= tmTime.tm_sec;
+        int tmMillisec	= tb.millitm;
+        sprintf(szTime, "%d-%d-%d %d:%d:%d.%d", tmYear, tmMon, tmMday, tmHour, tmMin, tmSec, tmMillisec);
+    }
+
+    prefixLength = sprintf(szBuffer, "[%s][%s][DT] ", szTime, tag);
+    p = szBuffer + prefixLength;
+    
+    if (1) {
+        va_list ap;
+        va_start(ap, format);
+        vsnprintf(p, 1024 - prefixLength, format, ap);
+        va_end(ap);
+    }
+
+    printf("%s\n", szBuffer);
+}
+
+void ffmpegLogCallback(void* ptr, int level, const char* fmt, va_list vl) {
+    static int printPrefix	= 1;
+    static int count		= 0;
+    static char prev[1024]	= { 0 };
+    char line[1024]			= { 0 };
+    static int is_atty;
+    AVClass* avc = ptr ? *(AVClass**)ptr : NULL;
+    if (level > AV_LOG_DEBUG) {
+        return;
+    }
+
+    line[0] = 0;
+
+    if (printPrefix && avc) {
+        if (avc->parent_log_context_offset) {
+            AVClass** parent = *(AVClass***)(((uint8_t*)ptr) + avc->parent_log_context_offset);
+            if (parent && *parent) {
+                snprintf(line, sizeof(line), "[%s @ %p] ", (*parent)->item_name(parent), parent);
+            }
+        }
+        snprintf(line + strlen(line), sizeof(line) - strlen(line), "[%s @ %p] ", avc->item_name(ptr), ptr);
+    }
+
+    vsnprintf(line + strlen(line), sizeof(line) - strlen(line), fmt, vl);
+    line[strlen(line) + 1] = 0;
+    simpleLog("%s", line);
+}
+
+int openCodecContext(AVFormatContext *fmtCtx, enum AVMediaType type, int *streamIdx, AVCodecContext **decCtx) {
+    int ret = 0;
+    do {
+        int streamIndex		= -1;
+        AVStream *st		= NULL;
+        AVCodec *dec		= NULL;
+        AVDictionary *opts	= NULL;
+
+        ret = av_find_best_stream(fmtCtx, type, -1, -1, NULL, 0);
+        if (ret < 0) {
+            simpleLog("Could not find %s stream.", av_get_media_type_string(type));
+            break;
+        }
+
+        streamIndex = ret;
+        st = fmtCtx->streams[streamIndex];
+
+        dec = avcodec_find_decoder(st->codecpar->codec_id);
+        if (!dec) {
+            simpleLog("Failed to find %s codec %d.", av_get_media_type_string(type), st->codecpar->codec_id);
+            ret = AVERROR(EINVAL);
+            break;
+        }
+
+        *decCtx = avcodec_alloc_context3(dec);
+        if (!*decCtx) {
+            simpleLog("Failed to allocate the %s codec context.", av_get_media_type_string(type));
+            ret = AVERROR(ENOMEM);
+            break;
+        }
+
+        if ((ret = avcodec_parameters_to_context(*decCtx, st->codecpar)) != 0) {
+            simpleLog("Failed to copy %s codec parameters to decoder context.", av_get_media_type_string(type));
+            break;
+        }
+
+        av_dict_set(&opts, "refcounted_frames", "0", 0);
+
+        if ((ret = avcodec_open2(*decCtx, dec, NULL)) != 0) {
+            simpleLog("Failed to open %s codec.", av_get_media_type_string(type));
+            break;
+        }
+
+        *streamIdx = streamIndex;
+        avcodec_flush_buffers(*decCtx);
+    } while (0);
+
+    return ret;
+}
+
+void closeCodecContext(AVFormatContext *fmtCtx, AVCodecContext *decCtx, int streamIdx) {
+    do {
+        if (fmtCtx == NULL || decCtx == NULL) {
+            break;
+        }
+
+        if (streamIdx < 0 || streamIdx >= fmtCtx->nb_streams) {
+            break;
+        }
+
+        fmtCtx->streams[streamIdx]->discard = AVDISCARD_ALL;
+        avcodec_close(decCtx);
+    } while (0);
+}
+
+ErrorCode copyYuvData(AVFrame *frame, unsigned char *buffer, int width, int height) {
+    ErrorCode ret		= kErrorCode_Success;
+    unsigned char *src	= NULL;
+    unsigned char *dst	= buffer;
+    int i = 0;
+    do {
+        if (frame == NULL || buffer == NULL) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        if (!frame->data[0] || !frame->data[1] || !frame->data[2]) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        for (i = 0; i < height; i++) {
+            src = frame->data[0] + i * frame->linesize[0];
+            memcpy(dst, src, width);
+            dst += width;
+        }
+
+        for (i = 0; i < height / 2; i++) {
+            src = frame->data[1] + i * frame->linesize[1];
+            memcpy(dst, src, width / 2);
+            dst += width / 2;
+        }
+
+        for (i = 0; i < height / 2; i++) {
+            src = frame->data[2] + i * frame->linesize[2];
+            memcpy(dst, src, width / 2);
+            dst += width / 2;
+        }
+    } while (0);
+    return ret;	
+}
+
+/*
+ErrorCode yuv420pToRgb32(unsigned char *yuvBuff, unsigned char *rgbBuff, int width, int height) {
+    ErrorCode ret = kErrorCode_Success;
+    AVPicture yuvPicture, rgbPicture;
+    uint8_t *ptmp = NULL;
+    do {
+        if (yuvBuff == NULL || rgbBuff == NULL) {
+            ret = kErrorCode_Invalid_Param
+            break;
+        }
+
+        if (decoder == NULL || decoder->swsCtx == NULL) {
+            ret = kErrorCode_Invalid_Param
+            break;
+        }
+
+        
+        avpicture_fill(&yuvPicture, yuvBuff, AV_PIX_FMT_YUV420P, width, height);
+        avpicture_fill(&rgbPicture, rgbBuff, AV_PIX_FMT_RGB32, width, height);
+
+        ptmp = yuvPicture.data[1];
+        yuvPicture.data[1] = yuvPicture.data[2];
+        yuvPicture.data[2] = ptmp;
+
+        sws_scale(decoder->swsCtx, yuvPicture.data, yuvPicture.linesize, 0, height, rgbPicture.data, rgbPicture.linesize);
+    } while (0);
+    return ret;
+}
+*/
+
+int roundUp(int numToRound, int multiple) {
+    return (numToRound + multiple - 1) & -multiple;
+}
+
+ErrorCode processDecodedVideoFrame(AVFrame *frame) {
+    ErrorCode ret = kErrorCode_Success;
+    double timestamp = 0.0f;
+    do {
+        if (frame == NULL ||
+            decoder->videoCallback == NULL ||
+            decoder->yuvBuffer == NULL ||
+            decoder->videoBufferSize <= 0) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        if (decoder->videoCodecContext->pix_fmt != AV_PIX_FMT_YUV420P) {
+            simpleLog("Not YUV420P, but unsupported format %d.", decoder->videoCodecContext->pix_fmt);
+            ret = kErrorCode_Invalid_Format;
+            break;
+        }
+
+        ret = copyYuvData(frame, decoder->yuvBuffer, decoder->videoCodecContext->width, decoder->videoCodecContext->height);
+        if (ret != kErrorCode_Success) {
+            break;
+        }
+
+        /*
+        ret = yuv420pToRgb32(decoder->yuvBuffer, decoder->rgbBuffer, decoder->videoCodecContext->width, decoder->videoCodecContext->height);
+        if (ret != kErrorCode_Success) {
+            break;
+        }
+        */
+
+        timestamp = (double)frame->pts * av_q2d(decoder->avformatContext->streams[decoder->videoStreamIdx]->time_base);
+
+        if (decoder->accurateSeek && timestamp < decoder->beginTimeOffset) {
+            //simpleLog("video timestamp %lf < %lf", timestamp, decoder->beginTimeOffset);
+            ret = kErrorCode_Old_Frame;
+            break;
+        }
+        decoder->videoCallback(decoder->yuvBuffer, decoder->videoSize, timestamp);
+    } while (0);
+    return ret;
+}
+
+ErrorCode processDecodedAudioFrame(AVFrame *frame) {
+    ErrorCode ret       = kErrorCode_Success;
+    int sampleSize      = 0;
+    int audioDataSize   = 0;
+    int targetSize      = 0;
+    int offset          = 0;
+    int i               = 0;
+    int ch              = 0;
+    double timestamp    = 0.0f;
+    do {
+        if (frame == NULL) {
+            ret = kErrorCode_Invalid_Param;
+            break;
+        }
+
+        sampleSize = av_get_bytes_per_sample(decoder->audioCodecContext->sample_fmt);
+        if (sampleSize < 0) {
+            simpleLog("Failed to calculate data size.");
+            ret = kErrorCode_Invalid_Data;
+            break;
+        }
+
+        if (decoder->pcmBuffer == NULL) {
+            decoder->pcmBuffer = (unsigned char*)av_mallocz(kInitialPcmBufferSize);
+            decoder->currentPcmBufferSize = kInitialPcmBufferSize;
+            simpleLog("Initial PCM buffer size %d.", decoder->currentPcmBufferSize);
+        }
+
+        audioDataSize = frame->nb_samples * decoder->audioCodecContext->channels * sampleSize;
+        if (decoder->currentPcmBufferSize < audioDataSize) {
+            targetSize = roundUp(audioDataSize, 4);
+            simpleLog("Current PCM buffer size %d not sufficient for data size %d, round up to target %d.",
+                decoder->currentPcmBufferSize,
+                audioDataSize,
+                targetSize);
+            decoder->currentPcmBufferSize = targetSize;
+            av_free(decoder->pcmBuffer);
+            decoder->pcmBuffer = (unsigned char*)av_mallocz(decoder->currentPcmBufferSize);
+        }
+
+        for (i = 0; i < frame->nb_samples; i++) {
+            for (ch = 0; ch < decoder->audioCodecContext->channels; ch++) {
+                memcpy(decoder->pcmBuffer + offset, frame->data[ch] + sampleSize * i, sampleSize);
+                offset += sampleSize;
+            }
+        }
+
+        timestamp = (double)frame->pts * av_q2d(decoder->avformatContext->streams[decoder->audioStreamIdx]->time_base);
+
+        if (decoder->accurateSeek && timestamp < decoder->beginTimeOffset) {
+            //simpleLog("audio timestamp %lf < %lf", timestamp, decoder->beginTimeOffset);
+            ret = kErrorCode_Old_Frame;
+            break;
+        }
+        if (decoder->audioCallback != NULL) {
+            decoder->audioCallback(decoder->pcmBuffer, audioDataSize, timestamp);
+        }
+    } while (0);
+    return ret;
+}
+
+ErrorCode decodePacket(AVPacket *pkt, int *decodedLen) {
+    int ret = 0;
+    int isVideo = 0;
+    AVCodecContext *codecContext = NULL;
+
+    if (pkt == NULL || decodedLen == NULL) {
+        simpleLog("decodePacket invalid param.");
+        return kErrorCode_Invalid_Param;
+    }
+
+    *decodedLen = 0;
+
+    if (pkt->stream_index == decoder->videoStreamIdx) {
+        codecContext = decoder->videoCodecContext;
+        isVideo = 1;
+    } else if (pkt->stream_index == decoder->audioStreamIdx) {
+        codecContext = decoder->audioCodecContext;
+        isVideo = 0;
+    } else {
+        return kErrorCode_Invalid_Data;
+    }
+
+    ret = avcodec_send_packet(codecContext, pkt);
+    if (ret < 0) {
+        simpleLog("Error sending a packet for decoding %d.", ret);
+        return kErrorCode_FFmpeg_Error;
+    }
+
+    while (ret >= 0) {
+        ret = avcodec_receive_frame(codecContext, decoder->avFrame);
+        if (ret == AVERROR(EAGAIN)) {
+            return kErrorCode_Success;
+        } else if (ret == AVERROR_EOF) {
+            return kErrorCode_Eof;
+        } else if (ret < 0) {
+            simpleLog("Error during decoding %d.", ret);
+            return kErrorCode_FFmpeg_Error;
+        } else {
+            int r = isVideo ? processDecodedVideoFrame(decoder->avFrame) : processDecodedAudioFrame(decoder->avFrame);
+            if (r == kErrorCode_Old_Frame) {
+                return r;
+            }
+        }
+    }
+
+    *decodedLen = pkt->size;
+    return kErrorCode_Success;
+}
+
+int readFromFile(uint8_t *data, int len) {
+    //simpleLog("readFromFile %d.", len);
+    int32_t ret         = -1;
+    int availableBytes  = 0;
+    int canReadLen      = 0;
+    do {
+        if (decoder->fp == NULL) {
+            break;
+        }
+
+        availableBytes = decoder->fileWritePos - decoder->fileReadPos;
+        if (availableBytes <= 0) {
+            break;
+        }
+
+        fseek(decoder->fp, decoder->fileReadPos, SEEK_SET);
+        canReadLen = MIN(availableBytes, len);
+        fread(data, canReadLen, 1, decoder->fp);
+        decoder->fileReadPos += canReadLen;
+        ret = canReadLen;
+    } while (0);
+    //simpleLog("readFromFile ret %d.", ret);
+    return ret;
+}
+
+int readFromFifo(uint8_t *data, int len) {
+    //simpleLog("readFromFifo %d.", len);
+    int32_t ret         = -1;
+    int availableBytes  = 0;
+    int canReadLen      = 0;
+    do {
+        if (decoder->fifo == NULL) {
+            break;
+        }	
+
+        availableBytes = av_fifo_size(decoder->fifo);
+        if (availableBytes <= 0) {
+            break;
+        }
+
+        canReadLen = MIN(availableBytes, len);
+        av_fifo_generic_read(decoder->fifo, data, canReadLen, NULL);
+        ret = canReadLen;
+    } while (0);
+    //simpleLog("readFromFifo ret %d, left %d.", ret, av_fifo_size(decoder->fifo));
+    return ret;
+}
+
+int readCallback(void *opaque, uint8_t *data, int len) {
+    //simpleLog("readCallback %d.", len);
+    int32_t ret         = -1;
+    do {
+        if (decoder == NULL) {
+            break;
+        }
+
+        if (data == NULL || len <= 0) {
+            break;
+        }		
+
+        ret = decoder->isStream ? readFromFifo(data, len) : readFromFile(data, len);
+    } while (0);
+    //simpleLog("readCallback ret %d.", ret);
+    return ret;
+}
+
+int64_t seekCallback(void *opaque, int64_t offset, int whence) {
+    int64_t ret         = -1;
+    int64_t pos         = -1;
+    int64_t req_pos     = -1;
+    //simpleLog("seekCallback %lld %d.", offset, whence);
+    do {
+        if (decoder == NULL || decoder->isStream || decoder->fp == NULL) {
+            break;
+        }
+
+        if (whence == AVSEEK_SIZE) {
+            ret = decoder->fileSize;
+            break;
+        }
+
+        if (whence != SEEK_END && whence != SEEK_SET && whence != SEEK_CUR) {
+            break;
+        }
+
+        ret = fseek(decoder->fp, (long)offset, whence);
+        if (ret == -1) {
+            break;
+        }
+
+        pos = (int64_t)ftell(decoder->fp);
+        if (pos < decoder->lastRequestOffset || pos > decoder->fileWritePos) {
+            decoder->lastRequestOffset  = pos;
+            decoder->fileReadPos        = pos;
+            decoder->fileWritePos       = pos;
+            req_pos                     = pos;
+            ret                         = -1;  // Forcing not to call read at once.
+            decoder->requestCallback(pos, getAailableDataSize());
+            simpleLog("Will request %lld and return %lld.", pos, ret);
+            break;
+        }
+
+        decoder->fileReadPos = pos;
+        ret = pos;
+    } while (0);
+    //simpleLog("seekCallback return %lld.", ret);
+
+    if (decoder != NULL && decoder->requestCallback != NULL) {
+        decoder->requestCallback(req_pos, getAailableDataSize());
+    }
+    return ret;
+}
+
+int writeToFile(unsigned char *buff, int size) {
+    int ret = 0;
+    int64_t leftBytes = 0;
+    int canWriteBytes = 0;
+    do {
+        if (decoder->fp == NULL) {
+            ret = -1;
+            break;
+        }
+
+        leftBytes = decoder->fileSize - decoder->fileWritePos;
+        if (leftBytes <= 0) {
+            break;
+        }
+
+        canWriteBytes = MIN(leftBytes, size);
+        fseek(decoder->fp, decoder->fileWritePos, SEEK_SET);
+        fwrite(buff, canWriteBytes, 1, decoder->fp);
+        decoder->fileWritePos += canWriteBytes;
+        ret = canWriteBytes;
+    } while (0);
+    return ret;
+}
+
+int writeToFifo(unsigned char *buff, int size) {
+    int ret = 0;
+    do {
+        if (decoder->fifo == NULL) {
+            ret = -1;
+            break;
+        }
+
+        int64_t leftSpace = av_fifo_space(decoder->fifo);
+        if (leftSpace < size) {
+            int growSize = 0;
+            do {
+                leftSpace += decoder->fifoSize;
+                growSize += decoder->fifoSize;
+                decoder->fifoSize += decoder->fifoSize;
+            } while (leftSpace < size);
+            av_fifo_grow(decoder->fifo, growSize);
+
+            simpleLog("Fifo size growed to %d.", decoder->fifoSize);
+            if (decoder->fifoSize >= kMaxFifoSize) {
+                simpleLog("[Warn] Fifo size larger than %d.", kMaxFifoSize);
+            }
+        }
+
+        //simpleLog("Wrote %d bytes to fifo, total %d.", size, av_fifo_size(decoder->fifo));
+        ret = av_fifo_generic_write(decoder->fifo, buff, size, NULL);
+    } while (0);
+    return ret;
+}
+
+int getAailableDataSize() {
+    int ret = 0;
+    do {
+        if (decoder == NULL) {
+            break;
+        }
+
+        if (decoder->isStream) {
+            ret = decoder->fifo == NULL ? 0 : av_fifo_size(decoder->fifo);
+        } else {
+            ret = decoder->fileWritePos - decoder->fileReadPos;
+        }
+    } while (0);
+    return ret;
+}
+
+//////////////////////////////////Export methods////////////////////////////////////////
+ErrorCode initDecoder(int fileSize, int logLv) {
+    ErrorCode ret = kErrorCode_Success;
+    do {
+        //Log level.
+        logLevel = logLv;
+
+        if (decoder != NULL) {
+            break;
+        }
+
+        decoder = (WebDecoder *)av_mallocz(sizeof(WebDecoder));
+        if (fileSize >= 0) {
+            decoder->fileSize = fileSize;
+            sprintf(decoder->fileName, "tmp-%lu.mp4", getTickCount());
+            decoder->fp = fopen(decoder->fileName, "wb+");
+            if (decoder->fp == NULL) {
+                simpleLog("Open file %s failed, err: %d.", decoder->fileName, errno);
+                ret = kErrorCode_Open_File_Error;
+                av_free(decoder);
+                decoder = NULL;
+            }
+        } else {
+            decoder->isStream = 1;
+            decoder->fifoSize = kDefaultFifoSize;
+            decoder->fifo = av_fifo_alloc(decoder->fifoSize);
+        }
+    } while (0);
+    simpleLog("Decoder initialized %d.", ret);
+    return ret;
+}
+
+ErrorCode uninitDecoder() {
+    if (decoder != NULL) {
+        if (decoder->fp != NULL) {
+            fclose(decoder->fp);
+            decoder->fp = NULL;
+            remove(decoder->fileName);
+        }
+
+        if (decoder->fifo != NULL) {
+             av_fifo_freep(&decoder->fifo);
+        }
+
+        av_freep(&decoder);
+    }
+
+    av_log_set_callback(NULL);
+
+    simpleLog("Decoder uninitialized.");
+    return kErrorCode_Success;
+}
+
+ErrorCode openDecoder(int *paramArray, int paramCount, long videoCallback, long audioCallback, long requestCallback) {
+    ErrorCode ret = kErrorCode_Success;
+    int r = 0;
+    int i = 0;
+    int params[7] = { 0 };
+    do {
+        simpleLog("打开编码器.");
+
+        av_register_all();
+        avcodec_register_all();
+
+        if (logLevel == kLogLevel_All) {
+            av_log_set_callback(ffmpegLogCallback);
+        }
+        
+        decoder->avformatContext = avformat_alloc_context();
+        decoder->customIoBuffer = (unsigned char*)av_mallocz(kCustomIoBufferSize);
+
+        AVIOContext* ioContext = avio_alloc_context(
+            decoder->customIoBuffer,
+            kCustomIoBufferSize,
+            0,
+            NULL,
+            readCallback,
+            NULL,
+            seekCallback);
+        if (ioContext == NULL) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("avio_alloc_context failed.");
+            break;
+        }
+
+        decoder->avformatContext->pb = ioContext;
+        decoder->avformatContext->flags = AVFMT_FLAG_CUSTOM_IO;
+		simpleLog("avformat_open_input.");
+
+        r = avformat_open_input(&decoder->avformatContext, NULL, NULL, NULL);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            char err_info[32] = { 0 };
+            av_strerror(ret, err_info, 32);
+            simpleLog("avformat_open_input failed %d %s.", ret, err_info);
+            break;
+        }
+        
+        simpleLog("avformat_find_stream_info");
+
+        r = avformat_find_stream_info(decoder->avformatContext, NULL);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("av_find_stream_info failed %d.", ret);
+            break;
+        }
+			
+			
+	
+        simpleLog("avformat_find_stream_info 成功.");
+
+        for (i = 0; i < decoder->avformatContext->nb_streams; i++) {
+            decoder->avformatContext->streams[i]->discard = AVDISCARD_DEFAULT;
+        }
+
+        r = openCodecContext(
+            decoder->avformatContext,
+            AVMEDIA_TYPE_VIDEO,
+            &decoder->videoStreamIdx,
+            &decoder->videoCodecContext);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("Open video codec context failed %d.", ret);
+            break;
+        }
+
+        simpleLog("Open video codec context success, video stream index %d %x.",
+            decoder->videoStreamIdx, (unsigned int)decoder->videoCodecContext);
+
+        simpleLog("Video stream index:%d pix_fmt:%d resolution:%d*%d.",
+            decoder->videoStreamIdx,
+            decoder->videoCodecContext->pix_fmt,
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height);
+
+        r = openCodecContext(
+            decoder->avformatContext,
+            AVMEDIA_TYPE_AUDIO,
+            &decoder->audioStreamIdx,
+            &decoder->audioCodecContext);
+        if (r != 0) {
+            ret = kErrorCode_FFmpeg_Error;
+            simpleLog("Open audio codec context failed %d.", ret);
+            break;
+        }
+
+        simpleLog("Open audio codec context success, audio stream index %d %x.",
+            decoder->audioStreamIdx, (unsigned int)decoder->audioCodecContext);
+
+        simpleLog("Audio stream index:%d sample_fmt:%d channel:%d, sample rate:%d.",
+            decoder->audioStreamIdx,
+            decoder->audioCodecContext->sample_fmt,
+            decoder->audioCodecContext->channels,
+            decoder->audioCodecContext->sample_rate);
+
+        av_seek_frame(decoder->avformatContext, -1, 0, AVSEEK_FLAG_BACKWARD);
+
+        /* For RGB Renderer(2D WebGL).
+        decoder->swsCtx = sws_getContext(
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height,
+            decoder->videoCodecContext->pix_fmt, 
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height,
+            AV_PIX_FMT_RGB32,
+            SWS_BILINEAR, 
+            0, 
+            0, 
+            0);
+        if (decoder->swsCtx == NULL) {
+            simpleLog("sws_getContext failed.");
+            ret = kErrorCode_FFmpeg_Error;
+            break;
+        }
+        */
+        
+        decoder->videoSize = avpicture_get_size(
+            decoder->videoCodecContext->pix_fmt,
+            decoder->videoCodecContext->width,
+            decoder->videoCodecContext->height);
+
+        decoder->videoBufferSize = 3 * decoder->videoSize;
+        decoder->yuvBuffer = (unsigned char *)av_mallocz(decoder->videoBufferSize);
+        decoder->avFrame = av_frame_alloc();
+        
+        params[0] = 1000 * (decoder->avformatContext->duration + 5000) / AV_TIME_BASE;
+        params[1] = decoder->videoCodecContext->pix_fmt;
+        params[2] = decoder->videoCodecContext->width;
+        params[3] = decoder->videoCodecContext->height;
+        params[4] = decoder->audioCodecContext->sample_fmt;
+        params[5] = decoder->audioCodecContext->channels;
+        params[6] = decoder->audioCodecContext->sample_rate;
+
+        enum AVSampleFormat sampleFmt = decoder->audioCodecContext->sample_fmt;
+        if (av_sample_fmt_is_planar(sampleFmt)) {
+            const char *packed = av_get_sample_fmt_name(sampleFmt);
+            params[4] = av_get_packed_sample_fmt(sampleFmt);
+        }
+
+        if (paramArray != NULL && paramCount > 0) {
+            for (int i = 0; i < paramCount; ++i) {
+                paramArray[i] = params[i];
+            }
+        }
+
+        decoder->videoCallback = (VideoCallback)videoCallback;
+        decoder->audioCallback = (AudioCallback)audioCallback;
+        decoder->requestCallback = (RequestCallback)requestCallback;
+
+        simpleLog("Decoder opened, duration %ds, picture size %d.", params[0], decoder->videoSize);
+    } while (0);
+
+    if (ret != kErrorCode_Success && decoder != NULL) {
+        av_freep(&decoder);
+    }
+    return ret;
+}
+
+ErrorCode closeDecoder() {
+    ErrorCode ret = kErrorCode_Success;
+    do {
+        if (decoder == NULL || decoder->avformatContext == NULL) {
+            break;
+        }
+
+        if (decoder->videoCodecContext != NULL) {
+            closeCodecContext(decoder->avformatContext, decoder->videoCodecContext, decoder->videoStreamIdx);
+            decoder->videoCodecContext = NULL;
+            simpleLog("Video codec context closed.");
+        }
+
+        if (decoder->audioCodecContext != NULL) {
+            closeCodecContext(decoder->avformatContext, decoder->audioCodecContext, decoder->audioStreamIdx);
+            decoder->audioCodecContext = NULL;
+            simpleLog("Audio codec context closed.");
+        }
+
+        AVIOContext *pb = decoder->avformatContext->pb;
+        if (pb != NULL) {
+            if (pb->buffer != NULL) {
+                av_freep(&pb->buffer);
+                decoder->customIoBuffer = NULL;
+            }
+            av_freep(&decoder->avformatContext->pb);
+            simpleLog("IO context released.");
+        }
+
+        avformat_close_input(&decoder->avformatContext);
+        decoder->avformatContext = NULL;
+        simpleLog("Input closed.");
+
+        if (decoder->yuvBuffer != NULL) {
+            av_freep(&decoder->yuvBuffer);
+        }
+
+        if (decoder->pcmBuffer != NULL) {
+            av_freep(&decoder->pcmBuffer);
+        }
+        
+        if (decoder->avFrame != NULL) {
+            av_freep(&decoder->avFrame);
+        }
+        simpleLog("All buffer released.");
+    } while (0);
+    return ret;
+}
+
+int sendData(unsigned char *buff, int size) {
+    int ret = 0;
+    int64_t leftBytes = 0;
+    int canWriteBytes = 0;
+    do {
+        if (decoder == NULL) {
+            ret = -1;
+            break;
+        }
+
+        if (buff == NULL || size == 0) {
+            ret = -2;
+            break;
+        }
+
+        ret = decoder->isStream ? writeToFifo(buff, size) : writeToFile(buff, size);
+    } while (0);
+    return ret;
+}
+
+ErrorCode decodeOnePacket() {
+    ErrorCode ret	= kErrorCode_Success;
+    int decodedLen	= 0;
+    int r			= 0;
+
+    AVPacket packet;
+    av_init_packet(&packet);
+    do {
+        if (decoder == NULL) {
+            ret = kErrorCode_Invalid_State;
+            break;
+        }
+
+        if (getAailableDataSize() <= 0) {
+            ret = kErrorCode_Invalid_State;
+            break;
+        }
+
+        packet.data = NULL;
+        packet.size = 0;
+
+        r = av_read_frame(decoder->avformatContext, &packet);
+        if (r == AVERROR_EOF) {
+            ret = kErrorCode_Eof;
+            break;
+        }
+
+        if (r < 0 || packet.size == 0) {
+            break;
+        }
+
+        do {
+            ret = decodePacket(&packet, &decodedLen);
+            if (ret != kErrorCode_Success) {
+                break;
+            }
+
+            if (decodedLen <= 0) {
+                break;
+            }
+
+            packet.data += decodedLen;
+            packet.size -= decodedLen;
+        } while (packet.size > 0);
+    } while (0);
+    av_packet_unref(&packet);
+    return ret;
+}
+
+ErrorCode seekTo(int ms, int accurateSeek) {
+    int ret = 0;
+    int64_t pts = (int64_t)ms * 1000;
+    decoder->accurateSeek = accurateSeek;
+    ret = avformat_seek_file(decoder->avformatContext,
+                                 -1,
+                                 INT64_MIN,
+                                 pts,
+                                 pts,
+                                 AVSEEK_FLAG_BACKWARD);
+    simpleLog("Native seek to %d return %d %d.", ms, ret, decoder->accurateSeek);
+    if (ret == -1) {
+        return kErrorCode_FFmpeg_Error;
+    } else {
+        avcodec_flush_buffers(decoder->videoCodecContext);
+        avcodec_flush_buffers(decoder->audioCodecContext);
+
+        // Trigger seek callback
+        AVPacket packet;
+        av_init_packet(&packet);
+        av_read_frame(decoder->avformatContext, &packet);
+
+        decoder->beginTimeOffset = (double)ms / 1000;
+        return kErrorCode_Success;
+    }
+}
+
+int main() {
+    //simpleLog("Native loaded.");
+    return 0;
+}
+
+#ifdef __cplusplus
+}
+#endif

+ 12 - 12
screenIos/helper.js

@@ -67,54 +67,54 @@
 	 return outPut;
  }
  //触发键盘事件, code表示键盘值
- function ExexuteKeyDown(code)
+ function ExexuteKeyDown(code,sn)
  {	 
 	 var jsonObj = {"data":{"keyCode":code, "event":"keyDown"}};
 	 var json = JSON.stringify(jsonObj);
 	 console.log("json==================",json);
-	 var sn = "RK3923C1201900139";
+	 // var sn = "RK3923C1201900139";
 	 return makeFrame(sn, 0, json);
  }
  //触发鼠标按下事件,x:x坐标, y:y坐标
- function ExexuteMouseDown(x, y)
+ function ExexuteMouseDown(x, y,sn)
  {
 	 var jsonObj = {"data":{"action":0, "count":1, "pointerId":0,"x":x, "y":y}, "event":"0"};
 	 var json = JSON.stringify(jsonObj);
 	 console.log("json==================",json);
-	 var sn = "RK3923C1201900139";
+	 // var sn = "RK3923C1201900139";
 	 return  makeFrame(sn, 0, json);
  }
   //触发鼠标移动事件,x:x坐标, y:y坐标
- function ExexuteMouseMove(x, y)
+ function ExexuteMouseMove(x, y,sn)
  {
 	 var jsonObj = {"data":{"action":2, "count":1, "pointerId":0,"x":x, "y":y}, "event":"2"};
 	 var json = JSON.stringify(jsonObj);
-	 var sn = "RK3923C1201900139";
+	 // var sn = "RK3923C1201900139";
 	 return  makeFrame(sn, 0, json);	 
  }
  
- function ExexuteKeyBoard(keycode)
+ function ExexuteKeyBoard(keycode,sn)
  {
  	 var jsonObj = {"data":{"keyCode":keycode.toString()},"event":"keyCode"};
  	 var json = JSON.stringify(jsonObj);
- 	 var sn = "RK3923C1201900139";
+ 	 // var sn = "RK3923C1201900139";
  	 return  makeFrame(sn, 0, json);	 
  }
  
   //触发鼠标抬起事件,x:x坐标, y:y坐标
- function ExexuteMouseUp(x, y)
+ function ExexuteMouseUp(x, y,sn)
  {
 	 var jsonObj = {"data":{"action":1, "count":1, "pointerId":0,"x":x, "y":y}, "event":"1"};
 	 var json = JSON.stringify(jsonObj);
-	 var sn = "RK3923C1201900139";
+	 // var sn = "RK3923C1201900139";
 	 return makeFrame(sn, 0, json);	 
  }
  //触发滑动事件
- function ExexuteMove(data)
+ function ExexuteMove(data,sn)
  {
  	 // var jsonObj = {"data":{"action":1, "count":1, "pointerId":0,"x":x, "y":y}, "event":"1"};
  	 // var json = JSON.stringify(jsonObj);
- 	 var sn = "RK3923C1201900139";
+ 	 // var sn = "RK3923C1201900139";
  	 return makeFrame(sn, 0, data);	 
  }
  //示例:

+ 1 - 0
screenIos/websocket.js

@@ -27,6 +27,7 @@ ws.addEventListener('message', function(event) {
 	if (input[0] == 0xff) {
 		// console.log("1111111")
 		// decodeAAC(input);
+		self.postMessage(input);
 
 	} else {
 		self.postMessage(input);