1.视频分辨率与缓冲分辨率
偶然看到了这个,这里的缓冲分辨率正好对应前面某次计算时针对width *height的扩展量。
2.再尝试一下不同的编解码器
kde@ai:/sbin$ gst-inspect-1.0 |grep -i hevc
libav: avdec_h265: libav HEVC (High Efficiency Video Coding) decoder
de265: libde265dec: HEVC/H.265 decoder
测试结果参见附录A和附录B。
3.初步结论:
结论似乎是:问题确实出现在decoder处。但是为什么呢?
3732480的帧长,与1920*1080相较,对应:2304*1080
这种2304*1080也是一种广泛使用的分辨率。
这是某款相机的视频输出分辨率。然后依照关键字2304,rk3588平台的这个问题似乎有人报告过:
H265解码与H264解码后的MppFrame编码成JPG的问题 · Issue #147 · rockchip-linux/mpp · GitHub
4.处理:
<待续....>
附录A Python从文件源截取一帧的代码:
注意decoder,我试过:libde265dec、avdec_h265、mppvideodec
各种decoder对应的输出文件尺寸:
- libde265dec
  frame len =3110400
- avdec_h265
  frame len =3110400
- mppvideodec
  frame len =3732480
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Resolve the parent directory of this script and add it (plus the script's
# own directory) to sys.path so sibling project modules can be imported.
import os
import sys
current_dir = os.path.dirname(os.path.abspath(__file__))
project_path = os.path.join(current_dir, '..')
sys.path.append(project_path)
sys.path.append(current_dir)
import gi
import cv2
import threading
import time
import warnings
import queue
# GStreamer bindings: pin the GObject-introspection API version before import.
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib
def on_new_sample(sink):
    """appsink 'new-sample' callback: dump the first decoded frame to disk.

    Pulls one sample from the appsink, writes its raw bytes to
    'frame_by_soft_decode_of_h265file.bin', then terminates the process —
    a single frame is all the decoder comparison needs.

    Returns Gst.FlowReturn.OK so the pipeline keeps running if the frame
    could not be captured.
    """
    sample = sink.emit('pull-sample')
    if sample is None:
        # EOS or flushing state: nothing to pull this time.
        return Gst.FlowReturn.OK
    buffer = sample.get_buffer()
    # 检查 buffer 是否有效 -> check the buffer is valid
    if buffer:
        # (Removed dead commented-out code: it called buffer.get_caps(),
        # but caps belong to the sample — use sample.get_caps() if needed.)
        # Map the buffer read-only to access the raw frame bytes.
        success, mapinfo = buffer.map(Gst.MapFlags.READ)
        if success:
            data = mapinfo.data
            # The frame length is the datum being compared across decoders.
            print(f'frame len ={len(data)}')
            file_path = 'frame_by_soft_decode_of_h265file.bin'
            with open(file_path, 'wb') as file:
                file.write(data)
            # Unmap before exiting so the buffer is released cleanly.
            buffer.unmap(mapinfo)
            sys.exit(0)
    return Gst.FlowReturn.OK
# Initialise GStreamer and build a software-decode pipeline (libde265dec)
# from the test H.265 elementary stream; run until on_new_sample grabs one
# frame and exits the process.
Gst.init(None)
pipeline = Gst.parse_launch('filesrc location=/home/kde/feng_test/mpp_decode_test/mpp-test/Tennis1080p.h265 ! h265parse ! libde265dec ! videoconvert ! appsink emit-signals=True max-buffers=1 drop=True sync=False')
# parse_launch auto-names the (first) appsink element 'appsink0'.
appsink = pipeline.get_by_name('appsink0')
appsink.connect('new-sample', on_new_sample)
pipeline.set_state(Gst.State.PLAYING)
GLib.MainLoop().run()
附录B C代码抓取一帧
各种decoder对应的输出文件尺寸:
- libde265dec
  frame len =3110400
- avdec_h265
  frame len =3110400
- mppvideodec
  frame len =3732480
#include <gst/gst.h>
#include <gst/app/gstappsink.h>

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
/* Output path for the captured raw frame.
 * NOTE(review): identifiers containing a double underscore are reserved for
 * the implementation (C11 7.1.3) — consider renaming to e.g. OUT_FILE. */
char *__OUT_FILE__ = "./from_c_code.yuv";
/* Parameter bundle handed to the file-writer thread (write_file). */
typedef struct {
    int buffer_size;  /* number of bytes to write */
    void *buffer;     /* frame data; not owned — caller keeps it mapped */
} ThreadParams;
/*
 * Thread entry: write the captured frame bytes to __OUT_FILE__.
 *
 * arg: ThreadParams* carrying the buffer pointer and its size in bytes.
 * Returns NULL on success, (void *)-1 on failure (the caller currently
 * discards the result via pthread_join(..., NULL)).
 */
void *write_file(void *arg)
{
    ThreadParams *params = (ThreadParams *)arg;
    void *buffer = params->buffer;
    int buffer_size = params->buffer_size;

    FILE *out_fp = fopen(__OUT_FILE__, "wb+");
    if (!out_fp) {
        printf("fopen error\n");
        return (void *)-1;
    }
    /* BUG FIX: check the write completed — a short write would silently
     * truncate the frame dump that the whole experiment depends on. */
    if (fwrite(buffer, 1, (size_t)buffer_size, out_fp) != (size_t)buffer_size) {
        printf("fwrite error\n");
    }
    g_usleep(1000000ul); /* debug aid: original 1 s settle delay kept */
    fflush(stdout);
    fflush(out_fp);
    fclose(out_fp);
    return NULL; /* BUG FIX: function previously fell off the end (UB) */
}
/*
 * Spawn a thread to write `buffer` (buffer_size bytes) to disk and block
 * until it finishes.  The thread is joined immediately, so this is
 * effectively a synchronous write; the thread merely isolates the file I/O.
 *
 * Returns EXIT_SUCCESS on success, EXIT_FAILURE on pthread errors.
 */
int write_file_in_thread_sync(void *buffer, int buffer_size)
{
    pthread_t thread;
    /* static: guarantees the params outlive this frame while the thread
     * runs (not re-entrant — fine for this one-shot capture tool). */
    static ThreadParams params;

    params.buffer = buffer;
    params.buffer_size = buffer_size;

    /* BUG FIX: source contained mojibake "(void*)¶ms" — the HTML entity
     * &para; had swallowed "&params"; reconstructed as &params. */
    if (pthread_create(&thread, NULL, write_file, (void *)&params) != 0) {
        perror("Failed to create thread");
        return EXIT_FAILURE;
    }
    if (pthread_join(thread, NULL) != 0) {
        perror("Failed to join thread");
        return EXIT_FAILURE;
    }
    return EXIT_SUCCESS; /* BUG FIX: success path previously had no return (UB) */
}
/*
 * appsink "new-sample" callback: dump one decoded frame to disk, then
 * terminate the process — a single frame suffices for the comparison.
 *
 * NOTE(review): the "new-sample" signal formally expects a GstFlowReturn;
 * returning FALSE (== 0 == GST_FLOW_OK) happens to work, but the handler
 * should ideally be GstFlowReturn (*)(GstAppSink *, gpointer).
 */
static gboolean on_new_sample_from_sink (GstAppSink *sink, gpointer *user_data) {
    GstSample *sample;
    GstBuffer *buffer;
    GstMapInfo map;

    printf("..........................................................\n");
    fflush(stdout);

    sample = gst_app_sink_pull_sample(sink);
    if (!sample) {
        /* EOS or flushing: nothing to pull. */
        return FALSE;
    }
    buffer = gst_sample_get_buffer(sample);
    if (buffer) {
        gsize buffer_size = gst_buffer_get_size(buffer);
        /* BUG FIX: "%u" with a gsize argument is a printf format mismatch
         * (UB on LP64); use GLib's portable format macro. */
        printf("Buffer size: %" G_GSIZE_FORMAT " bytes\n", buffer_size);
        /* BUG FIX: only touch map.data if the mapping succeeded. */
        if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
            write_file_in_thread_sync(map.data, buffer_size);
            gst_buffer_unmap(buffer, &map);
        }
    }
    gst_sample_unref(sample);
    g_usleep(1000000ul);
    exit(0); /* one frame captured — stop the whole process */
    return FALSE; // unreachable; kept to satisfy the signature
}
/*
 * decodebin "pad-added" handler: link a newly exposed video pad to the
 * converter (`data` is the videoconvert element).
 */
static void on_pad_added(GstElement *element, GstPad *pad, gpointer data) {
    printf("on_pad_added ............enter..\n");
    fflush(stdout);

    GstPad *sink_pad = gst_element_get_static_pad((GstElement *)data, "sink");
    if (gst_pad_is_linked(sink_pad)) {
        gst_object_unref(sink_pad);
        return;
    }

    GstCaps *caps = gst_pad_query_caps(pad, NULL);
    GstStructure *str = gst_caps_get_structure(caps, 0);
    const gchar *name = gst_structure_get_name(str);
    printf("on_pad_added....caps.name=%s", name);
    if (g_str_has_prefix(name, "video/")) {
        gst_pad_link(pad, sink_pad);
    }
    /* BUG FIX: gst_pad_query_caps() returns a new reference that was
     * leaked; `str` borrows from `caps`, so unref only after use. */
    gst_caps_unref(caps);
    gst_object_unref(sink_pad);
}
int main(int argc, char *argv[]) {
//#define NORMAL_INIT
#ifdef NORMAL_INIT
GstElement *pipeline, *source, *decode, *convert, *sink, *parser;
GstBus *bus;
GstMessage *msg;
gst_init(&argc, &argv);
pipeline = gst_pipeline_new("video-capture");
source = gst_element_factory_make("filesrc", "source");
decode = gst_element_factory_make("decodebin", "decoder"); //normal way.
convert = gst_element_factory_make("videoconvert", "converter");
sink = gst_element_factory_make("appsink", "sink");
if (!pipeline || !source || !decode || !convert || !sink) {
g_printerr("Not all elements could be created.\n");
return -1;
}
g_object_set(source, "location", "/home/kde/feng_test/mpp_decode_test/mpp-test/Tennis1080p.h265", NULL);
g_object_set(sink, "emit-signals", TRUE, "max-buffers", 1, "drop", TRUE, NULL);
gst_bin_add_many(GST_BIN(pipeline), source, decode, convert, sink, NULL);
gst_element_link(source, decode);
gst_element_link_many(convert, sink, NULL);
g_signal_connect(decode, "pad-added", G_CALLBACK(on_pad_added), convert);
g_signal_connect(sink, "new-sample", G_CALLBACK(on_new_sample_from_sink), NULL);
bus = gst_element_get_bus(pipeline);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
if (msg != NULL) {
gst_message_unref(msg);
}
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(bus);
gst_object_unref(pipeline);
#else
GstElement *pipeline, *source, *parser, *decode, *convert, *sink, *capsfilter;
GstCaps *caps;
GstBus *bus;
GstMessage *msg;
gst_init(&argc, &argv);
pipeline = gst_pipeline_new("pipeline");
source = gst_element_factory_make("filesrc", "source");
parser = gst_element_factory_make("h265parse", "parser");
decode = gst_element_factory_make("avdec_h265", "decoder");
convert = gst_element_factory_make("videoconvert", "converter");
sink = gst_element_factory_make("appsink", "sink");
capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
if (!pipeline || !source || !parser || !decode || !convert || !sink || !capsfilter) {
g_printerr("Not all elements could be created.\n");
return -1;
}
g_object_set(source, "location", "/home/kde/feng_test/mpp_decode_test/mpp-test/Tennis1080p.h265", NULL);
g_object_set(sink, "emit-signals", TRUE, "max-buffers", 300, "drop", TRUE, NULL);
// Define caps with color matrix settings for I420 format
caps = gst_caps_new_simple("video/x-h265",
"format", G_TYPE_STRING, "I420",
"colorimetry", G_TYPE_STRING, "bt601",
NULL);
g_object_set(capsfilter, "caps", caps, NULL);
gst_caps_unref(caps);
gst_bin_add_many(GST_BIN(pipeline), source, parser, capsfilter, decode, convert, sink, NULL);
if (!gst_element_link_many(source, parser, capsfilter, decode, convert, sink, NULL)) {
g_printerr("Elements could not be linked.\n");
gst_object_unref(pipeline);
return -1;
}
g_signal_connect(sink, "new-sample", G_CALLBACK(on_new_sample_from_sink), NULL);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
bus = gst_element_get_bus(pipeline);
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
if (msg != NULL) {
gst_message_unref(msg);
}
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(bus);
gst_object_unref(pipeline);
return 0;
#endif
}