Decoding YUV to BMP with FFmpeg on Android

Recently I have been studying FFmpeg audio/video encoding and decoding as technical groundwork for an upcoming company project. The project runs on Android; the requirements are not final yet, but decoding YUV to BMP may be needed.

Key code in activity_main.xml

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:orientation="horizontal"
        >

        <TextView
            android:id="@+id/tv_route_5"
            android:layout_width="0dp"
            android:layout_height="match_parent"
            android:layout_weight="3"
            android:gravity="center_vertical"/>

        <Button
            android:id="@+id/bt_yuv"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="选择YUV"
            />
        <Button
            android:id="@+id/bt6"
            android:layout_width="0dp"
            android:layout_height="wrap_content"
            android:layout_weight="1"
            android:text="解析"
            />
    </LinearLayout>
    <android.support.v7.widget.RecyclerView
        android:id="@+id/rv"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>

bt_yuv opens the system file manager to pick a file, tv_route_5 shows the selected file path, bt6 starts the decoding, and rv displays the decoded images.
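For completeness, the buttons can be wired up roughly as in the sketch below (a minimal sketch: the listener code is not part of the original post, and the tvRoute5 field, the request code 5 for bt_yuv, and the 352x288 test resolution are my assumptions based on the rest of the code):

    // In onCreate(), after setContentView(R.layout.activity_main):
    tvRoute5 = (TextView) findViewById(R.id.tv_route_5);

    findViewById(R.id.bt_yuv).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            openSystemFile(5); // request code 5: handled in onActivityResult() below
        }
    });

    findViewById(R.id.bt6).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // The width/height must match the raw YUV file; 352x288 (CIF) is only a test value.
            conversionYUV(tvRoute5, 352, 288);
        }
    });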

MainActivity.java 核心代码

    static {
        System.loadLibrary("native-lib");
    }

    /**
     * Convert a YUV file to BMP images.
     *
     * @param input_jstr  path of the input YUV file
     * @param output_jstr path of the output RGB file
     * @param w_jstr      frame width
     * @param h_jstr      frame height
     * @return the number of decoded frames, or a negative value on error
     */
    public native int yuvToBitmap(String input_jstr, String output_jstr, int w_jstr, int h_jstr);

For setting up FFmpeg in an Android project, see my earlier post 在Android Studio中使用cmake编译FFmpeg (building FFmpeg with CMake in Android Studio).
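If FFmpeg is built as shared libraries rather than statically linked into native-lib, a common pattern (especially on older Android versions) is to load the FFmpeg modules explicitly, in dependency order, before native-lib. A minimal sketch, assuming the standard module names; this demo only calls into libavutil and libswscale:

    static {
        // Dependencies first; the library names below assume a shared FFmpeg build.
        System.loadLibrary("avutil");
        System.loadLibrary("swscale");
        System.loadLibrary("native-lib");
    }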


    /**
     * Open the system file manager.
     *
     * @param type request code passed to startActivityForResult()
     */
    public void openSystemFile(int type) {
        Intent intent = new Intent(Intent.ACTION_GET_CONTENT); // system file-picker action
        intent.setType("*/*");                                 // accept any file type
        intent.addCategory(Intent.CATEGORY_OPENABLE);          // only openable content
        try {
            startActivityForResult(intent, type);
        } catch (Exception e) {
            Toast.makeText(this, "Could not open the file manager", Toast.LENGTH_SHORT).show();
        }
    }
    
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (resultCode == Activity.RESULT_OK) { // nothing was selected otherwise
            if (requestCode == 4) {
                tvRoute4.setText(videoPath);
            } else {
                // Resolve the returned Uri to a file path.
                // managedQuery() is deprecated; ContentResolver#query() is the modern equivalent.
                Uri uri = data.getData();
                String[] proj = {MediaStore.Images.Media.DATA};
                Cursor actualimagecursor = managedQuery(uri, proj, null, null, null);
                int actual_image_column_index = actualimagecursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
                actualimagecursor.moveToFirst();
                String img_path = actualimagecursor.getString(actual_image_column_index);
                if (img_path.endsWith(".yuv")) {
                    if (requestCode == 5) {
                        tvRoute5.setText(img_path);
                    }
                } else {
                    Toast.makeText(this, "Please select a .yuv file", Toast.LENGTH_SHORT).show();
                }
            }
        }
    }

    /**
     * Decode a YUV file into BMP images on a worker thread.
     */
    private void conversionYUV(final TextView tvRoute, final int w_jstr, final int h_jstr) {
        dialog.showDialog();
        String[] videoInfo = tvRoute.getText().toString().split("/");
        String fileName = videoInfo[videoInfo.length - 1];
        final String[] fileNames = fileName.split("\\.");
        new Thread(new Runnable() {
            public void run() {
                picNum = yuvToBitmap(tvRoute.getText().toString(), "/storage/emulated/0/Download/avtest/" + fileNames[0] + ".rgb", w_jstr, h_jstr);
                if (picNum >= 0) {
                    // success: picNum frames were decoded
                    handler.sendEmptyMessage(0);
                } else {
                    handler.sendEmptyMessage(1);
                }
            }
        }).start();
    }
    Handler handler = new Handler(new Handler.Callback() {
        @Override
        public boolean handleMessage(Message message) {
            if (message.what == 0) {
                dealResult();
            } else if (message.what == 1) {
                Toast.makeText(MainActivity.this, "Error", Toast.LENGTH_SHORT).show();
                dialog.dismiss();
            } 
            return false;
        }
    });

    /**
     * Refresh the RecyclerView with the decoded images.
     */
    private void dealResult() {
        for (int i = 0; i < picNum; i++) {
            dataImage.add("/storage/emulated/0/Download/avtest/img/_" + i + ".bmp");
        }
        adapterImage.notifyDataSetChanged();
        dialog.dismiss();
    }

Open the file manager, pick a YUV file, call yuvToBitmap() on a worker thread to do the decoding, and update the UI from the Handler.
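One practical detail: fopen() in the native code will not create missing directories, so /storage/emulated/0/Download/avtest/ and its img/ subdirectory must exist before decoding starts (and on Android 6.0+ the storage runtime permission must already be granted). A minimal sketch (prepareOutputDirs is a hypothetical helper, not part of the original code; it uses java.io.File):

    private boolean prepareOutputDirs() {
        // The native code writes <name>.rgb into avtest/ and the BMP frames into avtest/img/.
        File imgDir = new File("/storage/emulated/0/Download/avtest/img/");
        return imgDir.exists() || imgDir.mkdirs(); // mkdirs() also creates the parent avtest/
    }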

Core code in native-lib.cpp

/**
 * Save one frame of RGB24 data as a BMP file.
 * @param rgb24_buffer packed RGB24 pixel data (modified in place: R and B are swapped)
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @param index  frame index, used in the output file name
 * @return 0 on success, -1 on failure
 */
int SaveAsBMP(uint8_t *rgb24_buffer, int width, int height, int index) {
	// BMP file/info headers. Fixed-width types are used because long is 8 bytes on
	// 64-bit Android (arm64), which would break the header layout.
	typedef struct {
		int32_t imageSize;
		int32_t blank;
		int32_t startPosition;
	} BmpHead;

	typedef struct {
		int32_t Length;
		int32_t width;
		int32_t height;
		uint16_t colorPlane;
		uint16_t bitColor;
		int32_t zipFormat;
		int32_t realSize;
		int32_t xPels;
		int32_t yPels;
		int32_t colorUse;
		int32_t colorImportant;
	} InfoHead;

	int i = 0, j = 0;
	BmpHead m_BMPHeader = {0};
	InfoHead m_BMPInfoHeader = {0};
	char bfType[2] = {'B', 'M'};
	// 2 (bfType) + 12 (BmpHead) + 40 (InfoHead) = 54 bytes, the standard 24-bit BMP header size.
	int header_size = sizeof(bfType) + sizeof(BmpHead) + sizeof(InfoHead);
	FILE *fp_bmp = NULL;

	char filename[255];  // output path; adjust it to your own device
	sprintf(filename, "%s_%d.bmp", "/storage/emulated/0/Download/avtest/img/", index);

	if ((fp_bmp = fopen(filename, "wb")) == NULL) {
		printf("Error: Cannot open output BMP file.\n");
		return -1;
	}

	m_BMPHeader.imageSize = 3 * width * height + header_size;
	m_BMPHeader.startPosition = header_size;

	m_BMPInfoHeader.Length = sizeof(InfoHead);
	m_BMPInfoHeader.width = width;
	// BMP normally stores rows bottom-to-top; a negative height marks the data as top-down.
	m_BMPInfoHeader.height = -height;
	m_BMPInfoHeader.colorPlane = 1;
	m_BMPInfoHeader.bitColor = 24;
	m_BMPInfoHeader.realSize = 3 * width * height;

	fwrite(bfType, 1, sizeof(bfType), fp_bmp);
	fwrite(&m_BMPHeader, 1, sizeof(m_BMPHeader), fp_bmp);
	fwrite(&m_BMPInfoHeader, 1, sizeof(m_BMPInfoHeader), fp_bmp);
	// BMP stores each pixel as B|G|R rather than R|G|B, so swap the R and B channels in place.
	for (j = 0; j < height; j++) {
		for (i = 0; i < width; i++) {
			char temp = rgb24_buffer[(j * width + i) * 3 + 2];
			rgb24_buffer[(j * width + i) * 3 + 2] = rgb24_buffer[(j * width + i) * 3 + 0];
			rgb24_buffer[(j * width + i) * 3 + 0] = temp;
		}
	}

	fwrite(rgb24_buffer, 3 * width * height, 1, fp_bmp);
	fclose(fp_bmp);
	return 0;
}

/**
 * JNI entry point: convert a raw YUV file to RGB24 and save every frame as a BMP.
 */
extern "C"
JNIEXPORT jint JNICALL
Java_com_yodosmart_ffmpegdemo_MainActivity_yuvToBitmap(JNIEnv *env, jobject instance,
													   jstring input_jstr_, jstring output_jstr_,
													   jint w_jstr, jint h_jstr) {
	const char *input_jstr = env->GetStringUTFChars(input_jstr_, 0);
	const char *output_jstr = env->GetStringUTFChars(output_jstr_, 0);

	//Parameters
	FILE *src_file = fopen(input_jstr, "rb");
	const int src_w = w_jstr, src_h = h_jstr;
	AVPixelFormat src_pixfmt = AV_PIX_FMT_YUV420P;

	int src_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(src_pixfmt));

	FILE *dst_file = fopen(output_jstr, "wb");
	const int dst_w = w_jstr, dst_h = h_jstr;
	AVPixelFormat dst_pixfmt = AV_PIX_FMT_RGB24;
	int dst_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(dst_pixfmt));

	if (src_file == NULL || dst_file == NULL) {
		printf("Error: Cannot open input/output file.\n");
		return -1;
	}

	//Structures
	uint8_t *src_data[4];
	int src_linesize[4];

	uint8_t *dst_data[4];
	int dst_linesize[4];

	int rescale_method = SWS_BICUBIC;
	struct SwsContext *img_convert_ctx;
	uint8_t *temp_buffer = (uint8_t *) malloc(src_w * src_h * src_bpp / 8);

	int frame_idx = 0;
	int ret = 0;
	ret = av_image_alloc(src_data, src_linesize, src_w, src_h, src_pixfmt, 1);
	if (ret < 0) {
		printf("Could not allocate source image\n");
		return -1;
	}
	ret = av_image_alloc(dst_data, dst_linesize, dst_w, dst_h, dst_pixfmt, 1);
	if (ret < 0) {
		printf("Could not allocate destination image\n");
		return -1;
	}
	//-----------------------------
	//Init Method 1
	img_convert_ctx = sws_alloc_context();
	//Show AVOption
//	av_opt_show2(img_convert_ctx, stdout, AV_OPT_FLAG_VIDEO_PARAM, 0);
	//Set Value
//	av_opt_set_int(img_convert_ctx, "sws_flags", SWS_BICUBIC | SWS_PRINT_INFO, 0);
	av_opt_set_int(img_convert_ctx, "srcw", src_w, 0);
	av_opt_set_int(img_convert_ctx, "srch", src_h, 0);
	av_opt_set_int(img_convert_ctx, "src_format", src_pixfmt, 0);
	//'0' for MPEG (Y:0-235);'1' for JPEG (Y:0-255)
	av_opt_set_int(img_convert_ctx, "src_range", 1, 0);
	av_opt_set_int(img_convert_ctx, "dstw", dst_w, 0);
	av_opt_set_int(img_convert_ctx, "dsth", dst_h, 0);
	av_opt_set_int(img_convert_ctx, "dst_format", dst_pixfmt, 0);
	av_opt_set_int(img_convert_ctx, "dst_range", 1, 0);
	sws_init_context(img_convert_ctx, NULL, NULL);
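	// Note: sws_init_context() returns a negative value on failure; checking that return
	// value here (as done for the av_image_alloc() calls above) would make the error handling safer.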

	//Init Method 2
	//img_convert_ctx = sws_getContext(src_w, src_h,src_pixfmt, dst_w, dst_h, dst_pixfmt,
	//  rescale_method, NULL, NULL, NULL);
	//-----------------------------
	/*
	//Colorspace
	ret=sws_setColorspaceDetails(img_convert_ctx,sws_getCoefficients(SWS_CS_ITU601),0,
		sws_getCoefficients(SWS_CS_ITU709),0,
		 0, 1 << 16, 1 << 16);
	if (ret==-1) {
		printf( "Colorspace not support.\n");
		return -1;
	}
	*/
	while (1) {
		if (fread(temp_buffer, 1, src_w * src_h * src_bpp / 8, src_file) !=
			src_w * src_h * src_bpp / 8) {
			break;
		}

		switch (src_pixfmt) {
			case AV_PIX_FMT_GRAY8: {
				memcpy(src_data[0], temp_buffer, src_w * src_h);
				break;
			}
			case AV_PIX_FMT_YUV420P: {
				memcpy(src_data[0], temp_buffer, src_w * src_h);                    //Y
				memcpy(src_data[1], temp_buffer + src_w * src_h, src_w * src_h / 4);      //U
				memcpy(src_data[2], temp_buffer + src_w * src_h * 5 / 4, src_w * src_h / 4);  //V
				break;
			}
			case AV_PIX_FMT_YUV422P: {
				memcpy(src_data[0], temp_buffer, src_w * src_h);                    //Y
				memcpy(src_data[1], temp_buffer + src_w * src_h, src_w * src_h / 2);      //U
				memcpy(src_data[2], temp_buffer + src_w * src_h * 3 / 2, src_w * src_h / 2);  //V
				break;
			}
			case AV_PIX_FMT_YUV444P: {
				memcpy(src_data[0], temp_buffer, src_w * src_h);                    //Y
				memcpy(src_data[1], temp_buffer + src_w * src_h, src_w * src_h);        //U
				memcpy(src_data[2], temp_buffer + src_w * src_h * 2, src_w * src_h);      //V
				break;
			}
			case AV_PIX_FMT_YUYV422: {
				memcpy(src_data[0], temp_buffer, src_w * src_h * 2);                  //Packed
				break;
			}
			case AV_PIX_FMT_RGB24: {
				memcpy(src_data[0], temp_buffer, src_w * src_h * 3);                  //Packed
				break;
			}
			default: {
				printf("Not Support Input Pixel Format.\n");
				break;
			}
		}

		sws_scale(img_convert_ctx, (const uint8_t *const *) src_data, src_linesize, 0, src_h,
				  dst_data, dst_linesize);
		printf("Finish process frame %5d\n", frame_idx);

		switch (dst_pixfmt) {
			case AV_PIX_FMT_GRAY8: {
				fwrite(dst_data[0], 1, dst_w * dst_h, dst_file);
				break;
			}
			case AV_PIX_FMT_YUV420P: {
				fwrite(dst_data[0], 1, dst_w * dst_h, dst_file);                 //Y
				fwrite(dst_data[1], 1, dst_w * dst_h / 4, dst_file);               //U
				fwrite(dst_data[2], 1, dst_w * dst_h / 4, dst_file);               //V
				break;
			}
			case AV_PIX_FMT_YUV422P: {
				fwrite(dst_data[0], 1, dst_w * dst_h, dst_file);                 //Y
				fwrite(dst_data[1], 1, dst_w * dst_h / 2, dst_file);               //U
				fwrite(dst_data[2], 1, dst_w * dst_h / 2, dst_file);               //V
				break;
			}
			case AV_PIX_FMT_YUV444P: {
				fwrite(dst_data[0], 1, dst_w * dst_h, dst_file);                 //Y
				fwrite(dst_data[1], 1, dst_w * dst_h, dst_file);                 //U
				fwrite(dst_data[2], 1, dst_w * dst_h, dst_file);                 //V
				break;
			}
			case AV_PIX_FMT_YUYV422: {
				fwrite(dst_data[0], 1, dst_w * dst_h * 2, dst_file);               //Packed
				break;
			}
			case AV_PIX_FMT_RGB24: {
				fwrite(dst_data[0], 1, dst_w * dst_h * 3, dst_file);               //Packed
				SaveAsBMP(dst_data[0], src_w, src_h, frame_idx);
				break;
			}
			case AV_PIX_FMT_BGR24: {
				fwrite(dst_data[0], 1, dst_w * dst_h * 3, dst_file);               //Packed
				SaveAsBMP(dst_data[0], src_w, src_h, frame_idx);
				break;
			}
			default: {
				printf("Not Support Output Pixel Format.\n");
				break;
			}
		}

		frame_idx++;
	}

	sws_freeContext(img_convert_ctx);

	free(temp_buffer);
	fclose(src_file);
	fclose(dst_file);
	av_freep(&src_data[0]);
	av_freep(&dst_data[0]);

	env->ReleaseStringUTFChars(input_jstr_, input_jstr);
	env->ReleaseStringUTFChars(output_jstr_, output_jstr);

	return frame_idx;
}

The native code breaks down into two parts, corresponding to the two functions above:

  1. Convert the YUV data to RGB data (yuvToBitmap, using libswscale)
  2. Convert the RGB data to a BMP file (SaveAsBMP)

Note the RGB/BGR conversion: BMP stores pixels in B|G|R order, so the R and B channels are swapped in SaveAsBMP before the pixel data is written.
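Since av_get_bits_per_pixel() reports 12 bits per pixel for YUV420P, each frame read in the native loop is width * height * 3 / 2 bytes, which also gives a quick way to sanity-check the frame count on the Java side. A minimal sketch (yuvFile, width, and height are illustrative variables, not from the original code):

    // Each YUV420P frame = Y (w*h) + U (w*h/4) + V (w*h/4) = w*h*3/2 bytes.
    long frameSize = (long) width * height * 3 / 2;
    long expectedFrames = yuvFile.length() / frameSize; // should match the value returned by yuvToBitmap()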

Code download

References
http://blog.csdn.net/leixiaohua1020/article/details/42134965
http://blog.csdn.net/leixiaohua1020/article/details/50534150

Welcome to follow my WeChat official account, where I keep sharing quality technical articles.
