Implementing a WebRTC-based P2P H265 Player (Part 2)

This article details how H265 decoding is driven from JavaScript with parallel workers: the cooperation between the player worker and the decoder worker, the key functions handleVideo() and stopVideo(), and the use of WebGLPlayer. It also covers the restrictions on UI and WebRTC APIs inside workers, how WebRTC signaling interacts with the workers, player state management, and frame handling.

The previous article covered the core H265 decoding functionality built with ffmpeg compiled to WebAssembly; this article focuses on the player's control logic and implementation.

The player runs two workers, the player worker (Player.js) and the decoder worker (decoder.js), which communicate by message passing — effectively multithreading in JavaScript. One important pitfall: many UI and WebRTC APIs are not available inside a worker, so keep this in mind when deciding where each piece of code lives. The entry function handleVideo() creates the player worker, and the player worker in turn creates the decoder worker. In parallel, WebRTC signaling runs on the main thread to establish the P2P connection and open the DataChannel that carries the H265 data.
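The message-type constants and player states referenced throughout (kInitDecoderReq, kplayeVideoFrame, playerStateIdle, and so on) are defined in common.js, which is not shown here. For orientation, a sketch of what that file presumably contains — only the names are taken from the code below, the numeric values are illustrative placeholders:

// Illustrative only -- the real values live in common.js (not shown).
var kInitDecoderReq = 0, kInitDecoderRsp = 1;
var kOpenDecoderReq = 2, kOpenDecoderRsp = 3;
var kStartDecodingReq = 4, kPauseDecodingReq = 5;
var kFeedDataReq = 6, kVideoFrame = 7;
var kstartPlayerCoderReq = 8, kInitPlayerReq = 9;
var ksendPlayerVideoFrameReq = 10, kplayeVideoFrame = 11;
var playerStateIdle = 0, playerStatePlaying = 1;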


var webglPlayer, canvas, videoWidth, videoHeight, yLength, uvLength;
var player = null;

function handleVideo() {
    // Spawn the player worker and start WebRTC signaling in parallel.
    player = new Worker("Player.js");
    startDeviceSession(player);
    // The player worker posts decoded frames back here for rendering,
    // since (as noted above) UI APIs are not available inside the worker.
    player.onmessage = function (evt) {
        var objData = evt.data;
        switch (objData.t) {
        case kplayeVideoFrame:
            webgldisplayVideoFrame(objData.d);
            break;
        default:
            break;
        }
    };
    // Ask the player worker to create the decoder worker.
    var req = {
        t: kstartPlayerCoderReq,
    };
    player.postMessage(req);
}
function stopVideo() {
    // Close the WebRTC and MQTT connections.
    endWebrtc();
    endMqtt();
}
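startDeviceSession() itself is not shown in this excerpt. A minimal sketch of what it needs to do, using hypothetical names (pc, dc) and assuming the offer/answer and ICE exchange are handled elsewhere: open the H265 DataChannel, forward each received packet into the player worker with ksendPlayerVideoFrameReq, and start the display loop with kInitPlayerReq once data can flow:

function startDeviceSession(playerWorker) {
    // Hypothetical sketch -- signaling setup is omitted.
    var pc = new RTCPeerConnection();
    var dc = pc.createDataChannel("h265", { ordered: true });
    dc.binaryType = "arraybuffer";
    dc.onopen = function () {
        // Start the worker's display loop once the channel is up.
        playerWorker.postMessage({ t: kInitPlayerReq });
    };
    dc.onmessage = function (evt) {
        // Transfer the ArrayBuffer into the worker (zero-copy).
        playerWorker.postMessage(
            { t: ksendPlayerVideoFrameReq, d: evt.data, l: evt.data.byteLength },
            [evt.data]);
    };
    // ... offer/answer and ICE candidate exchange over the signaling
    // channel would follow here (omitted).
}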

var webt1 = new Date().getTime();
function webgldisplayVideoFrame(obj) {
    var data = new Uint8Array(obj.data);
    var width = obj.width;
    var height = obj.height;
    // Planar YUV420: one full-resolution Y plane, quarter-resolution U and V planes.
    var yLength = width * height;
    var uvLength = (width / 2) * (height / 2);
    // Lazily create the WebGL renderer on the first frame.
    if (!webglPlayer) {
        const canvasId = "playCanvas";
        canvas = document.getElementById(canvasId);
        webglPlayer = new WebGLPlayer(canvas, {
            preserveDrawingBuffer: false
        });
    }

    // Log the time elapsed since the previous frame was displayed.
    const t2 = new Date().getTime() - webt1;
    console.log("display time:" + t2 + " width:" + width + " height:" + height +
        " yLength:" + yLength + " uvLength:" + uvLength);
    webglPlayer.renderFrame(data, width, height, yLength, uvLength);
    webt1 = new Date().getTime();
}
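The renderFrame() call presumably expects the decoded frame as one contiguous I420 buffer, so the plane offsets follow directly from yLength and uvLength. As a worked example, a 1920x1080 frame carries 1920 * 1080 = 2,073,600 Y bytes plus 960 * 540 = 518,400 bytes each for U and V — 1.5 bytes per pixel in total. Slicing the planes out of such a buffer (a sketch, where data is the Uint8Array from above):

var yData = data.subarray(0, yLength);
var uData = data.subarray(yLength, yLength + uvLength);
var vData = data.subarray(yLength + uvLength, yLength + 2 * uvLength);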
Everything below is Player.js, the player worker. It creates and drives the decoder worker and maintains the buffer of decoded frames:

self.Module = {
    onRuntimeInitialized: function () {
        // Emscripten runtime hook; nothing to do in this worker.
    }
};
var webglPlayer, canvas, videoWidth, videoHeight, yLength, uvLength;
var LOG_LEVEL_JS = 0;
var LOG_LEVEL_WASM = 1;
var LOG_LEVEL_FFMPEG = 2;
var DECODER_H264 = 0;
var DECODER_H265 = 1;
self.importScripts("common.js");
function Player() {
    this.decoder_type = DECODER_H265;
    this.decoding = false;
    this.webrtcplayerState = playerStateIdle;
    this.decodeInterval = 5;   // ms between decode ticks in the decoder worker
    this.urgent = false;       // when true, decode as fast as possible (interval 0)

    this.frameBuffer = [];     // decoded frames waiting to be displayed
}
Player.prototype.setDecoder=function(type) {
    this.decoder_type = type;
}
Player.prototype.startDecoder = function () {
    var self = this;
    // Spawn the decoder worker (the ffmpeg wasm module from the previous article).
    this.decoderworker = new Worker("decoder.js");
    var req = {
        t: kInitDecoderReq,
    };
    this.decoderworker.postMessage(req);
    this.decoderworker.onmessage = function (evt) {
        var objData = evt.data;
        switch (objData.t) {
            case kInitDecoderRsp:
                self.onInitDecoder(objData);
                break;
            case kOpenDecoderRsp:
                self.onOpenDecoder(objData);
                break;
            case kVideoFrame:
                self.onVideoFrame(objData);
                break;
        }
    };
};
Player.prototype.startDecoding = function () {
    var req = {
        t: kStartDecodingReq,
        i: this.urgent ? 0 : this.decodeInterval,   // decode-tick interval in ms
    };
    this.decoderworker.postMessage(req);
    this.decoding = true;
    this.webrtcplayerState = playerStatePlaying;
};
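On the other side, the decoder worker (decoder.js, covered in the previous article) presumably answers kStartDecodingReq by starting a periodic decode timer with the requested interval. A sketch of that pattern, with decodeOnePacket() as a hypothetical name for the routine that pulls buffered data through the wasm decoder:

// Sketch of the decoder worker's handling -- the actual code lives in decoder.js.
var decodeTimer = null;
function onStartDecodingReq(req) {
    if (decodeTimer) {
        clearInterval(decodeTimer);
    }
    // req.i == 0 is the "urgent" case: decode as fast as the timer allows.
    decodeTimer = setInterval(function () {
        decodeOnePacket();
    }, req.i);
}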

Player.prototype.pauseDecoding = function () {
    var req = {
        t: kPauseDecodingReq
    };
    this.decoderworker.postMessage(req);
    this.decoding = false;
};
Player.prototype.onOpenDecoder = function (objData) {
    console.log("reqOpenDecoder response " + objData.e + ".");
    // e == 0 means the decoder opened successfully; start pulling frames.
    if (objData.e == 0) {
        this.startDecoding();
    }
};
Player.prototype.onInitDecoder = function (objData) {
    console.log("Init decoder response " + objData.e + ".");
    // Once the wasm decoder is initialized, ask it to open the H265 codec.
    this.reqOpenDecoder();
};
Player.prototype.reqOpenDecoder=function(){
    var req = {
        t: kOpenDecoderReq
    };
    this.decoderworker.postMessage(req);
}
Player.prototype.onFrameData = function (data, len) {
    // Forward raw H265 data to the decoder worker, transferring
    // the underlying buffer instead of copying it.
    var objData = {
        t: kFeedDataReq,
        d: data
    };
    this.decoderworker.postMessage(objData, [objData.d]);
};
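Continuing the decoder-side sketch from above: kFeedDataReq presumably appends the transferred bytes to an input queue that the decode timer drains. All names here are hypothetical; the real logic lives in decoder.js and was the subject of the previous article:

var inputQueue = [];
function onFeedDataReq(req) {
    // req.d arrives as a transferred ArrayBuffer (no copy across threads).
    inputQueue.push(new Uint8Array(req.d));
}
function decodeOnePacket() {
    if (inputQueue.length === 0) {
        return;
    }
    var chunk = inputQueue.shift();
    // Copy chunk into the wasm heap and call the H265 decode entry point
    // (the wasm interface itself was covered in the previous article).
}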
// Buffering control. showLoading/hideLoading/pause/resume are hooks
// that are not shown in this excerpt.
Player.prototype.startBuffering = function () {
    this.buffering = true;
    this.showLoading();
    this.pause();
};

Player.prototype.stopBuffering = function () {
    this.buffering = false;
    this.hideLoading();
    this.resume();
};

Player.prototype.displayVideoFrame = function (frame) {
    // There is no audio track in this player, so there is no clock to sync
    // against: audioTimestamp and delay stay 0 and every frame renders at once.
    var audioTimestamp = 0;
    var delay = 0;
    if (audioTimestamp <= 0 || delay <= 0) {
        this.renderVideoFrame(frame.d);
        return true;
    }
    // With audio, a positive delay would keep the frame buffered until its
    // presentation time; returning false leaves it at the head of frameBuffer.
    return false;
};

Player.prototype.displayLoop = function () {
    // Keep scheduling ourselves while the player is active.
    if (this.webrtcplayerState !== playerStateIdle) {
        requestAnimationFrame(this.displayLoop.bind(this));
    }
    // Display the oldest buffered frame; only drop it once it has rendered.
    if (this.frameBuffer.length > 0) {
        var frame = this.frameBuffer[0];
        if (this.displayVideoFrame(frame)) {
            this.frameBuffer.shift();
        }
    }
};
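Note that requestAnimationFrame is one of the few rendering-related APIs that is available inside dedicated workers in modern browsers. For a target browser that lacks it, a setTimeout fallback keeps the loop running (a sketch):

// Fallback for worker contexts without requestAnimationFrame (~60 fps).
var raf = self.requestAnimationFrame
    ? self.requestAnimationFrame.bind(self)
    : function (cb) { return setTimeout(function () { cb(Date.now()); }, 16); };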

Player.prototype.renderVideoFrame = function (data) {
    // Hand the decoded frame back to the main thread for WebGL rendering,
    // transferring the pixel buffer instead of copying it.
    var playFrame = {
        t: kplayeVideoFrame,
        d: data
    };
    self.postMessage(playFrame, [playFrame.d.data.buffer]);
};
Player.prototype.getBufferTimerLength = function () {
    if (!this.frameBuffer || this.frameBuffer.length == 0) {
        return 0;
    }
    // Buffered duration = pts of the newest frame minus pts of the oldest.
    let oldest = this.frameBuffer[0];
    let newest = this.frameBuffer[this.frameBuffer.length - 1];
    return newest.s - oldest.s;
};
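getBufferTimerLength() is the natural input for the buffering hooks above, although this excerpt never wires them together. A hypothetical watermark check (thresholds and call site are illustrative only):

// Hypothetical: run periodically, e.g. from the display loop.
var LOW_WATERMARK = 100, HIGH_WATERMARK = 500;   // in pts units (e.g. ms)
if (!player.buffering && player.getBufferTimerLength() < LOW_WATERMARK) {
    player.startBuffering();
} else if (player.buffering && player.getBufferTimerLength() > HIGH_WATERMARK) {
    player.stopBuffering();
}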
Player.prototype.bufferFrame = function (frame) {
    // Frames arriving while not decoding (e.g. right after a pause)
    // could be discarded here.
    this.frameBuffer.push(frame);
    console.log("bufferFrame pts:" + frame.s + " w:" + frame.d.width + ", h:" + frame.d.height);
};
Player.prototype.onVideoFrame = function (frame) {
    this.bufferFrame(frame);
};

Player.prototype.processReq = function (req) {
    switch (req.t) {
        case kstartPlayerCoderReq:
            this.startDecoder();
            break;
        case kInitPlayerReq:
            this.displayLoop();
            break;
        case ksendPlayerVideoFrameReq:
            this.onFrameData(req.d, req.l);
            break;
        default:
            // No logger object exists on Player; log directly to the console.
            console.error("Unsupported message " + req.t);
    }
};
self.player = new Player();

self.onmessage = function (evt) {
    if (!self.player) {
        console.log("[ER] Player not initialized!");
        return;
    }

    var req = evt.data;


    self.player.processReq(req);
};
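Putting the pieces together, the full message flow between the three contexts is:

main thread: handleVideo() --kstartPlayerCoderReq--> Player worker: startDecoder()
Player worker --kInitDecoderReq / kOpenDecoderReq / kStartDecodingReq--> decoder worker
DataChannel --ksendPlayerVideoFrameReq--> Player worker: onFrameData() --kFeedDataReq--> decoder worker
decoder worker --kVideoFrame--> frameBuffer --displayLoop() / kplayeVideoFrame--> main thread: webgldisplayVideoFrame()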

Previous article in this series: "webrtc支持H265(一) webrtc datachannel的特性" — https://blog.csdn.net/mo4776/article/details/131333916