Without further ado, here is the code.
Writing the header (this part works):
-(void) init_mp4saving:(AVFormatContext*) pFormatCtx videoStreamin:(int) videoStream savenamein:(NSString *)savename {
    i_video_stream = pFormatCtx->streams[videoStream];
    int ret;

    // Build an output path in Documents named after the current time, e.g. "14-03-27.mp4".
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSCalendar *curCalendar = [NSCalendar currentCalendar];
    NSUInteger unitFlags = NSHourCalendarUnit | NSMinuteCalendarUnit | NSSecondCalendarUnit;
    NSDateComponents *dateComponents = [curCalendar components:unitFlags fromDate:[NSDate date]];
    movBasePath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    filePath = [movBasePath stringByAppendingPathComponent:[NSString stringWithFormat:@"%ld-%ld-%ld.mp4", (long)dateComponents.hour, (long)dateComponents.minute, (long)dateComponents.second]];
    [fileManager createFileAtPath:filePath contents:nil attributes:nil];
    const char *out_filename = [filePath UTF8String]; // output file path

    // av_register_all(); // register all container formats and codecs
    avformat_alloc_output_context2(&outfmt_ctx, NULL, "mp4", out_filename); // allocate an AVFormatContext for the MP4 output
    if (!outfmt_ctx) {
        NSLog(@"Could not create output context\n");
        return;
    }
    outFormat = outfmt_ctx->oformat;
    AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_H264); // look up the H.264 codec
    AVStream *out_stream = avformat_new_stream(outfmt_ctx, codec); // create the AVStream for the output
    if (!out_stream) {
        NSLog(@"Failed allocating output stream\n");
        return;
    }

    // Copy the codec parameters we need from the input stream's codec context.
    int FPS = 30;
    out_Cdc_ctx = out_stream->codec;
    out_Cdc_ctx->extradata = i_video_stream->codec->extradata;
    out_Cdc_ctx->extradata_size = i_video_stream->codec->extradata_size;
    out_Cdc_ctx->bit_rate = 16 * 1000;
    out_Cdc_ctx->codec_id = i_video_stream->codec->codec_id;
    out_Cdc_ctx->codec_type = i_video_stream->codec->codec_type;
    out_Cdc_ctx->time_base = (AVRational){1, FPS}; // earlier 1/25 and i_video_stream->time_base attempts were overridden by this
    fprintf(stderr, "time_base.num = %d time_base.den = %d\n", out_Cdc_ctx->time_base.num, out_Cdc_ctx->time_base.den);
    out_Cdc_ctx->width = i_video_stream->codec->width;
    out_Cdc_ctx->height = i_video_stream->codec->height;
    out_Cdc_ctx->pix_fmt = i_video_stream->codec->pix_fmt;
    NSLog(@"mp4save width:%d height:%d pix_fmt: %d", out_Cdc_ctx->width, out_Cdc_ctx->height, out_Cdc_ctx->pix_fmt);
    out_Cdc_ctx->flags = i_video_stream->codec->flags;
    out_Cdc_ctx->flags |= CODEC_FLAG_GLOBAL_HEADER;
    out_Cdc_ctx->me_range = i_video_stream->codec->me_range;
    out_Cdc_ctx->max_qdiff = i_video_stream->codec->max_qdiff;
    out_Cdc_ctx->qmin = i_video_stream->codec->qmin;
    out_Cdc_ctx->qmax = i_video_stream->codec->qmax;
    out_Cdc_ctx->qcompress = i_video_stream->codec->qcompress;
    // ret = avcodec_copy_context(out_stream->codec, avcodec_alloc_context3(codec)); // copy the input stream's AVCodecContext values into the output stream's AVCodecContext
    // if (ret < 0)
    // {
    //     NSLog(@"Failed to copy context from input to output stream codec context\n");
    // }
    // out_stream->codec->pix_fmt = AV_PIX_FMT_YUV420P; // supported pixel format
    // out_stream->codec->flags = CODEC_FLAG_GLOBAL_HEADER;
    // out_stream->codec->width = i_video_stream->codec->width;
    // out_stream->codec->height = i_video_stream->codec->height;
    // out_stream->time_base = i_video_stream->time_base;
    out_stream->codec->time_base = (AVRational){1, FPS};
    out_stream->codec->gop_size = FPS;
    // out_stream->codec->bit_rate = 16*1000;
    // out_stream->codec->codec_tag = 0;
    if (outfmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
    {
        out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    // AVBitStreamFilterContext *avFilter = av_bitstream_filter_init("h264_mp4toannexb");
    // out_stream->codec->extradata_size = size;
    // out_stream->codec->extradata = (uint8_t *)av_malloc(size + FF_INPUT_BUFFER_PADDING_SIZE);
    // Dump the output format for inspection
    av_dump_format(outfmt_ctx, 0, out_filename, 1);
    if (!(outFormat->flags & AVFMT_NOFILE))
    {
        ret = avio_open(&outfmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            NSLog(@"Could not open output file %s", out_filename);
        }
    }
    // Write the file header. Note: the mp4 muxer's option for the track timescale is
    // "video_track_timescale"; the "timescale"/"video_timescale"/"track_timescale" names
    // tried originally are not recognized by the muxer.
    AVDictionary *opt = NULL;
    av_dict_set(&opt, "video_track_timescale", "25", 0);
    ret = avformat_write_header(outfmt_ctx, &opt);
    if (ret < 0)
    {
        NSLog(@"Error occurred when opening output file\n");
    }
    // input_video_stream = *(pFormatCtx->streams[videoStream]);
    // o_video_stream->time_base.num = 1;
    // o_video_stream->time_base.den = 25;
}
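For context, here is a minimal sketch of how the input side that feeds this method might be set up; the stream URL and the surrounding error handling are assumptions, not part of the original code:

// Sketch only: how pFormatCtx and videoStream could be obtained before arming the recorder.
// The URL below is a placeholder.
AVFormatContext *pFormatCtx = NULL;
if (avformat_open_input(&pFormatCtx, "rtsp://example.com/stream", NULL, NULL) < 0) {
    NSLog(@"could not open input");
    return;
}
avformat_find_stream_info(pFormatCtx, NULL);
int videoStream = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
// With the input open, setting record_status to 1 makes the packet loop below call
// init_mp4saving and start writing.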
The code that writes the frames and the trailer:
// Check whether this packet starts with an Annex B SPS NAL (00 00 00 01 67).
bool isIDR = true;
unsigned char c[5] = { 0x00, 0x00, 0x00, 0x01, 0x67 };
int n = 0;
for (n = 0; n < 5; n++) {
    if (vp.data[n] != c[n]) {
        isIDR = false;
        break;
    }
}
// Recording was requested (record_status == 1): open the output file on the first
// packet that is not an SPS NAL, then switch to the recording state.
if (!isIDR) {
    if (record_status == 1) {
        // sprintf(savename, "/sdcard/record_%d.mp4", ++record_num);
        savename = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie2.mp4"];
        [self init_mp4saving:pFormatCtx videoStreamin:videoStream savenamein:savename];
        last_pts = 0;
        last_dts = 0;
        finished = 0;
        NSLog(@"record_status ok : record_status = %d \n", record_status);
        record_status = 2;
    }
}
// Recording: stamp the packet and write it into the MP4.
if (record_status == 2) {
    frame_num++;
    NSLog(@"record_status ok : record_status = %d \n", record_status);
    NSLog(@"frame index %d\n", frame_num);
    packet.flags |= AV_PKT_FLAG_KEY;
    packet.pts += last_pts;
    // if (pCodecCtx->coded_frame->pts != AV_NOPTS_VALUE){
    //     packet.pts = av_rescale_q(pCodecCtx->coded_frame->pts, pCodecCtx->time_base, _videoStream->time_base);
    //     NSLog(@"frame index2 %d packet->pts %d\n", frame_num, packet.pts);
    // }
    packet.dts += last_dts;
    packet.stream_index = 0;
    packet.duration = 1;
    av_interleaved_write_frame(outfmt_ctx, &packet);
    // Stop after roughly 200 frames.
    if (frame_num > 200) {
        record_status = 3;
    }
} else if (record_status == 3) {
    // Finish the file: write the trailer and release the output context.
    // avformat_close_input(&i_fmt_ctx);
    NSLog(@"av_write_trailer!!!!!!!!!! \n");
    av_write_trailer(outfmt_ctx);
    NSLog(@" avcodec_close!!!!!!!! \n");
    avcodec_close(outfmt_ctx->streams[0]->codec);
    av_freep(&outfmt_ctx->streams[0]->codec);
    av_freep(&outfmt_ctx->streams[0]);
    avio_close(outfmt_ctx->pb);
    av_free(outfmt_ctx);
    record_status = 0;
    NSLog(@"record_end\n");
}
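The pts/dts handling above passes the input timestamps through (last_pts/last_dts start at 0) and forces duration = 1, which only lines up when the input and output time bases match. The more common remuxing pattern, as in FFmpeg's doc/examples/remuxing.c, rescales each packet from the input stream's time base to the output stream's. A minimal sketch, reusing i_video_stream and outfmt_ctx from the code above:

// Alternative sketch: rescale packet timestamps between time bases instead of
// adding last_pts/last_dts by hand.
AVStream *out_stream = outfmt_ctx->streams[0];
packet.pts = av_rescale_q_rnd(packet.pts, i_video_stream->time_base, out_stream->time_base,
                              AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
packet.dts = av_rescale_q_rnd(packet.dts, i_video_stream->time_base, out_stream->time_base,
                              AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
packet.duration = av_rescale_q(packet.duration, i_video_stream->time_base, out_stream->time_base);
packet.pos = -1;
av_interleaved_write_frame(outfmt_ctx, &packet);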