WebAssembly编译ffmpeg

编译ffmpeg

脚本 build.sh

# build.sh — compile ffmpeg_decode.c against prebuilt FFmpeg static libraries
# (libavcodec/libavutil/libswscale) into ffmpeg_decode.js + .wasm with Emscripten.
# decode.js (the worker glue below) is prepended via --pre-js.
export FFMPEG_PATH=./ffmpeg-snapshot/decoder_wasm/ffmpeg
echo "Running Emscripten..."
# -O1                   : light optimization (faster builds while debugging)
# EXPORTED_FUNCTIONS    : only _malloc/_free need explicit export; the C entry
#                         points survive via EMSCRIPTEN_KEEPALIVE (EM_PORT_API)
# ALLOW_MEMORY_GROWTH=1 : let the wasm heap grow for large frames
# ASSERTIONS=1          : extra runtime checks; consider disabling for release
emcc -O1 ffmpeg_decode.c --pre-js decode.js \
-I ${FFMPEG_PATH}/include/ \
${FFMPEG_PATH}/lib/libavcodec.a  \
${FFMPEG_PATH}/lib/libavutil.a  \
${FFMPEG_PATH}/lib/libswscale.a  \
-o ffmpeg_decode.js \
-s EXPORTED_FUNCTIONS=_malloc,_free \
-s ALLOW_MEMORY_GROWTH=1 \
-s ASSERTIONS=1 \
-lworkerfs.js
# Flags kept for reference (currently disabled):
# -s EXPORTED_RUNTIME_METHODS=ccall,cwrap,allocate,UTF8ToString,intArrayFromString \
# -s ENVIRONMENT=web \
# -s MODULARIZE=1 \


# -s FORCE_FILESYSTEM=1 \
# -s RESERVED_FUNCTION_POINTERS \
# -s EXPORT_ES6=1 \
# -s USE_ES6_IMPORT_META=0

echo "Finished Build"

源码

#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
#include <libavutil/common.h>
#include <libavutil/frame.h>
#include <libavutil/samplefmt.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <libavutil/parseutils.h>
#include <libavutil/mem.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>

/* EM_PORT_API(rettype): marks a function as an exported entry point.
 * Under Emscripten it expands to EMSCRIPTEN_KEEPALIVE so the symbol survives
 * dead-code elimination and stays callable from JavaScript (Module._name);
 * in a native build it is a plain declaration.  extern "C" is added when
 * compiled as C++ to prevent name mangling. */
#ifndef EM_PORT_API
#if defined(__EMSCRIPTEN__)
#include <emscripten.h>
#if defined(__cplusplus)
#define EM_PORT_API(rettype) extern "C" rettype EMSCRIPTEN_KEEPALIVE
#else
#define EM_PORT_API(rettype) rettype EMSCRIPTEN_KEEPALIVE
#endif
#else
#if defined(__cplusplus)
#define EM_PORT_API(rettype) extern "C" rettype
#else
#define EM_PORT_API(rettype) rettype
#endif
#endif
#endif

/* Global decoder state shared by the exported entry points.
 * Created by init(), used by parsePkt()/decode(), released by close(). */
const AVCodec *videoCodec = NULL;       /* decoder selected from the codec id   */
AVCodecContext *videoCodecCtx = NULL;   /* opened decoding context              */
AVCodecParserContext *parser = NULL;    /* splits raw bytes into packets        */
AVPacket *pkt = NULL;                   /* packet handed to the decoder         */
AVFrame *yuvFrame = NULL;               /* decoded frame in the codec's pix_fmt */
AVFrame *rgbFrame = NULL;               /* same frame converted to RGB32        */
struct SwsContext *img_ctx = NULL;      /* YUV -> RGB32 conversion context      */
unsigned char *out_buffer = NULL;       /* backing pixel storage for rgbFrame   */
int frameWidth = 0;                     /* dimensions of the last decoded frame */
int frameHeight = 0;
uint8_t *frame = NULL;                  /* points at rgbFrame->data[0] (RGB32)  */

/* Width in pixels of the most recently decoded frame; 0 until the first
 * frame has been decoded.  Called from JS as Module._getWidth(). */
EM_PORT_API(int)
getWidth()
{
  return frameWidth;
}

/* Height in pixels of the most recently decoded frame; 0 until the first
 * frame has been decoded.  Called from JS as Module._getHeight(). */
EM_PORT_API(int)
getHeight()
{
  return frameHeight;
}

/* Pointer (wasm heap offset, as seen from JS) to the RGB32 pixel data of the
 * last decoded frame; NULL until decode() has produced a frame. */
EM_PORT_API(uint8_t *)
getFrame()
{
  return frame;
}

/* Create the decoder state for the given FFmpeg codec id: packet, frames,
 * decoder, parser and an opened codec context.  On failure prints a
 * diagnostic and returns early; partially-created objects are left for
 * close() to reclaim.  Called from JS as Module._init(codecId). */
EM_PORT_API(void)
init(int codecID)
{
  pkt = av_packet_alloc();
  if (!pkt)
  {
    printf("pkt alloc failed.\n");
    return;
  }
  yuvFrame = av_frame_alloc();
  if (!yuvFrame)
  {
    printf("yuvFrame alloc failed.\n");
    return;
  }
  rgbFrame = av_frame_alloc();
  if (!rgbFrame)
  {
    printf("rgbFrame alloc failed.\n");
    return;
  }
  videoCodec = avcodec_find_decoder((enum AVCodecID)codecID);
  if (!videoCodec)
  {
    printf("videoCodec find failed.\n");
    return;
  }
  parser = av_parser_init(codecID);
  if (!parser)
  {
    printf("parser init failed.\n");
    return;
  }
  videoCodecCtx = avcodec_alloc_context3(videoCodec);
  if (!videoCodecCtx)
  {
    printf("videoCodecCtx alloc failed.\n");
    return;
  }
  int ret = avcodec_open2(videoCodecCtx, videoCodec, NULL);
  if (ret < 0)
  {
    /* Fixed: this path previously printed the same "alloc failed" message
     * as the branch above, making failure logs ambiguous. */
    printf("avcodec_open2 failed.\n");
    return;
  }
  printf("codec init success.\n");
}

/* Release every resource created by init()/decode() and reset the globals so
 * a subsequent init() starts clean.  Called from JS as Module._close().
 * NOTE(review): the name shadows POSIX close(2); kept because the JS side
 * calls Module._close(). */
EM_PORT_API(void)
close()
{
  if (parser)
  {
    av_parser_close(parser);
    parser = NULL;
  }

  /* av_packet_free / av_frame_free already NULL their argument and accept a
   * NULL target, so no guards or manual re-NULLing are needed. */
  av_packet_free(&pkt);
  av_frame_free(&yuvFrame);
  av_frame_free(&rgbFrame);

  /* avcodec_free_context() also closes the codec.  The original additionally
   * called avcodec_close() afterwards, but that block was dead code: the
   * pointer was already NULL by then — removed. */
  if (videoCodecCtx)
  {
    avcodec_free_context(&videoCodecCtx);
  }

  if (out_buffer)
  {
    av_free(out_buffer);
    out_buffer = NULL;
  }
  if (img_ctx) {
    sws_freeContext(img_ctx);
    img_ctx = NULL;
  }
  /* `frame` pointed into out_buffer (via rgbFrame->data[0]) and is now
   * dangling; clear it so getFrame() cannot hand out freed memory. */
  frame = NULL;
  printf("close %s\n", __FUNCTION__);
}

/* Send the current packet (pkt) to the decoder and drain every frame it
 * produces.  Each decoded YUV frame is converted to RGB32 into out_buffer
 * (allocated lazily on the first frame, once width/height are known) and
 * published through the frame/frameWidth/frameHeight globals read by JS. */
EM_PORT_API(void)
decode()
{
  int ret = avcodec_send_packet(videoCodecCtx, pkt);
  if (ret >= 0)
  {
    for (;;)
    {
      ret = avcodec_receive_frame(videoCodecCtx, yuvFrame);
      /* EAGAIN: decoder needs more input; EOF: fully drained.  In the
       * original these were tested inside a `>= 0` while-condition and were
       * therefore unreachable (AVERROR values are negative). */
      if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        break;
      if (ret < 0)
      {
        fprintf(stderr, "Error during decoding\n");
        break;
      }
      if (!img_ctx)
      {
        printf("init img_ctx\n");
        img_ctx = sws_getContext(videoCodecCtx->width,
                                 videoCodecCtx->height,
                                 videoCodecCtx->pix_fmt,
                                 videoCodecCtx->width,
                                 videoCodecCtx->height,
                                 AV_PIX_FMT_RGB32,
                                 SWS_BICUBIC, NULL, NULL, NULL);
        if (!img_ctx) /* added: sws_getContext can fail */
          break;
      }
      if (!out_buffer)
      {
        printf("init out_buffer\n");
        int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, videoCodecCtx->width, videoCodecCtx->height, 1);
        out_buffer = (unsigned char *)av_malloc(numBytes * sizeof(unsigned char));
        if (!out_buffer) /* added: allocation can fail */
          break;

        int res = av_image_fill_arrays(
            rgbFrame->data, rgbFrame->linesize,
            out_buffer, AV_PIX_FMT_RGB32,
            videoCodecCtx->width, videoCodecCtx->height, 1);
        if (res < 0)
        {
          break;
        }
      }

      sws_scale(img_ctx,
                yuvFrame->data, yuvFrame->linesize,
                0, videoCodecCtx->height,
                rgbFrame->data, rgbFrame->linesize);
      frameWidth = videoCodecCtx->width;
      frameHeight = videoCodecCtx->height;
      frame = rgbFrame->data[0];
    }
  }
  /* Unref once, after draining; the original also unref'd inside the loop,
   * which was redundant. */
  av_packet_unref(pkt);
}

/* Feed `len` bytes of raw bitstream into the parser; every complete packet
 * the parser emits is decoded via decode().  len == 0 performs one flush
 * pass.  Called from JS as Module._parsePkt(ptr, len). */
EM_PORT_API(void)
parsePkt(uint8_t *data, int len)
{
  int eof = !len;
  while (len > 0 || eof)
  {
    /* Fixed: pass the real remaining length instead of a hard-coded 4096,
     * which over-read the caller's buffer for short inputs and silently
     * dropped everything past 4096 bytes for long ones. */
    int ret = av_parser_parse2(parser, videoCodecCtx, &pkt->data, &pkt->size,
                               data, len, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
    if (ret < 0)
    {
      fprintf(stderr, "Error while parsing\n");
      /* Fixed: `continue` here spun forever because data/len never
       * advanced on error; bail out instead. */
      break;
    }

    data += ret;
    len -= ret;

    if (pkt->size)
    {
      decode();
    }

    if (eof)
      break; /* single flush pass done (eof was never cleared originally) */
  }
}

js 胶水

var Module = typeof Module != 'undefined' ? Module : {};

// --pre-jses are emitted after the Module integration code, so that they can
// refer to Module (if they choose; they can also define Module).
// Fixed: the original reassigned `Module = {}` here, which silently discarded
// any configuration installed before this script ran; attach the handler to
// the existing object instead.
Module.onRuntimeInitialized = function () {
  // Signal the main thread that the wasm runtime is ready for _init/_parsePkt.
  console.log(Module);
  let message = {
    type: "message",
    info: "init"
  };
  postMessage(message);
};

// Worker-global decoding state.
let u8Array;                // entire input buffer as a Uint8Array (set in onmessage)
console.log("Worker: mission start.");
let allDataLength;          // u8Array.length, cached when the buffer arrives
let pos = 0;                // current read offset into u8Array
let startArray;             // last sliced per-chunk header bytes
let dataArray;              // last sliced payload bytes
let cnt = 0;                // payload multiplier; NOTE(review): never assigned in
                            // this file — presumably set by other worker code,
                            // otherwise every slice is empty. Confirm.
let time = "";              // timestamp forwarded with each decoded image
let runFlag = false;        // true while continuous decoding is active
let timeMap = new Map();    // byte offset -> sequential chunk index (getTimesRange)
let codecId = 27;           // FFmpeg codec id; 27 is presumably AV_CODEC_ID_H264
                            // — TODO confirm against the build's headers

// Scan the whole buffer and build timeMap (byte offset -> sequential chunk
// index), then post an "updatePos" message with the final offset and the map
// and rewind pos to 0.
// NOTE(review): unlike decodeArray(), this loop never advances pos by
// startLength after slicing the header, so the offsets recorded here may not
// match the positions decodeArray() actually visits — confirm the intended
// chunk layout before relying on timeMap for seeking.
// NOTE(review): startLength / singleDataLength are assumed to be defined by
// worker code not shown in this file.
function getTimesRange() {
  console.log("获取POS");
  timeMap.clear();
  let index = 0;
  timeMap.set(0, 0);
  while (true) {
    // Stop when fewer than startLength header bytes remain.
    if (pos + startLength > allDataLength) {
      break;
    }
    startArray = u8Array.slice(pos, pos + startLength);
    if (pos + singleDataLength * cnt > allDataLength) {
      break;
    }
    pos += singleDataLength * cnt;
    index++;
    timeMap.set(pos, index);
  }
  let message = {
    type: "updatePos",
    info: pos,
    map: timeMap
  };

  postMessage(message);
  pos = 0;
}

// Decode the next chunk at `pos`: slice header + payload, copy the payload
// into the wasm heap, run parse/decode, and post the resulting RGB32 frame
// to the main thread.  Re-arms itself via setTimeout while runFlag is set.
function decodeArray() {
  // Not enough bytes left for another chunk header -> done.
  if (pos + startLength > allDataLength) {
    console.log("Worker: mission finished.");
    pos = 0;
    return;
  }
  startArray = u8Array.slice(pos, pos + startLength);
  pos += startLength;
  if (pos + singleDataLength * cnt > allDataLength) {
    console.log("Worker: mission finished.");
    pos = 0;
    return;
  }
  dataArray = u8Array.slice(pos, pos + singleDataLength * cnt);
  pos += singleDataLength * cnt;

  // Fixed: the wasm buffer was sized as 1024 * cnt, which under-allocates
  // (heap corruption via HEAPU8.set) whenever singleDataLength > 1024 and
  // passed the wrong length to _parsePkt; size everything from the actual
  // slice instead.
  var byteLength = dataArray.length * dataArray.BYTES_PER_ELEMENT;
  var ptr = Module._malloc(byteLength);
  Module.HEAPU8.set(dataArray, ptr);
  Module._parsePkt(ptr, byteLength);
  let outputPtr = Module._getFrame();
  Module._free(ptr);

  if (0 == outputPtr) {
    // No frame produced yet (decoder still buffering input).
    // Fixed: always return here — the original fell through when runFlag was
    // false and read pixel data from a NULL frame pointer.
    if (runFlag) {
      setTimeout(() => {
        decodeArray();
      }, 1);
    }
    return;
  }

  // Copy the RGB32 pixels out of the wasm heap so the buffer can be
  // transferred to the main thread.
  var rgbData = new Uint8ClampedArray(
    Module.HEAPU8.subarray(
      outputPtr,
      outputPtr + Module._getWidth() * Module._getHeight() * 4,
    ),
  );
  let rgbObj = {
    width: Module._getWidth(),
    height: Module._getHeight(),
    rgb: rgbData,
    time: time,
    currentPos: timeMap.get(pos)
  };
  let message = {
    type: "image",
    info: rgbObj
  };
  // Second argument transfers (not copies) the pixel buffer.
  postMessage(message, [message.info.rgb.buffer]);
  if (runFlag) {
    setTimeout(() => {
      decodeArray();
    }, 1);
  }
}

// Worker message dispatcher: routes control messages ("message", "updatePos",
// "updateCodecId"); anything else is treated as a new raw data buffer to
// index and decode.
onmessage = function (e) {
  const msg = e.data;
  switch (msg.type) {
    case "message":
      // Start/stop the continuous decode loop.
      if ("start" == msg.info) {
        runFlag = true;
        decodeArray();
      } else if ("stop" == msg.info) {
        runFlag = false;
      }
      break;
    case "updatePos":
      // Seek: jump to the requested byte offset and decode a single chunk.
      pos = msg.info;
      runFlag = false;
      decodeArray();
      break;
    case "updateCodecId":
      // Switch decoders by tearing down and re-initialising the codec.
      codecId = msg.info;
      Module._close();
      Module._init(codecId);
      console.log(codecId);
      break;
    default:
      // Raw payload (no .type): reset state, re-init codec, build the index.
      u8Array = msg;
      allDataLength = u8Array.length;
      pos = 0;
      Module._close();
      Module._init(codecId);
      getTimesRange();
  }
};

# Configure FFmpeg for an Emscripten cross-build: compile with emcc/em++/emar,
# install into ../decoder_wasm/ffmpeg, and disable everything except the
# H.264/HEVC decoders and parsers (no asm, no programs, no network, no docs)
# to keep the resulting wasm small.
emconfigure ./configure --cc="emcc" --cxx="em++" --ar="emar" --prefix=$(pwd)/../decoder_wasm/ffmpeg \
    --enable-cross-compile \
    --target-os=none \
    --arch=x86_32 \
    --cpu=generic \
    --enable-gpl \
    --enable-version3 \
    --disable-avdevice \
    --disable-avformat \
    --disable-swresample \
    --disable-postproc \
    --disable-avfilter     \
    --disable-programs \
    --disable-logging \
    --disable-everything \
    --enable-decoder=hevc \
    --enable-decoder=h264     \
    --enable-parser=hevc \
    --enable-parser=h264     \
    --disable-ffplay \
    --disable-ffprobe \
    --disable-asm \
    --disable-doc \
    --disable-devices \
    --disable-network     \
    --disable-bsfs \
    --disable-debug \
    --disable-protocols \
    --disable-indevs \
    --disable-outdevs



  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
WebAssembly 和 FFmpeg 是两个独立的技术,但它们可以结合使用。WebAssembly 是一种低级字节码,可以在网页上运行用高性能语言(如 C++ 和 Rust)编写的程序;而 FFmpeg 是一个功能强大的音视频库,用于处理音视频数据。WebAssembly 可以扩展浏览器的应用场景,尤其是一些原本 JavaScript 无法实现或性能有问题的场景;FFmpeg 作为一个功能强大的音视频库,提取视频帧只是其功能的一小部分,后续还有更多 WebAssembly 的应用场景可以去探索。上文提供了将 FFmpeg 及相关库用 emcc 编译为 WebAssembly 供 JavaScript 调用的示例代码和命令;编译选项可以根据具体需求进行调整,如指定内存大小、导出函数等。因此,通过将 FFmpeg 编译为 WebAssembly,可以在浏览器中高效地处理音视频数据,例如提取视频帧等操作。参考:1、3 [前端视频帧提取 ffmpeg + Webassembly](https://blog.csdn.net/yinshipin007/article/details/130115549);2 [搭建webassembly网页播放器(四)---网页调用ffmpeg.js单元测试用例](https://blog.csdn.net/webrtc_video/article/details/115702759)

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值