Encoding to a Raw H.264 Stream and Writing It to a File (Part 1)

Why call it a raw stream? Because this article only goes as far as encoding the video and writing the result to a file. What you get is a file whose extension should be ".h264", and only feature-rich players such as PotPlayer can open it, because it carries no container information. Only after it is packed into a container such as MP4 or MKV does it become a proper video file. This article, however, covers only writing the encoded data directly to a file to produce a raw H.264 stream.

The code is taken from the official FFmpeg 2.8 example: https://ffmpeg.org/doxygen/2.8/decoding_encoding_8c-example.html

 

static void video_encode_example(const char *filename, int codec_id)
{
    AVCodec *codec;
    AVCodecContext *c= NULL;
    int i, ret, x, y, got_output;
    FILE *f;
    AVFrame *frame;
    AVPacket pkt;
    uint8_t endcode[] = { 0, 0, 1, 0xb7 };

    printf("Encode video file %s\n", filename);

    /* find the mpeg1 video encoder */
    codec = avcodec_find_encoder(codec_id);
    if (!codec) {
        fprintf(stderr, "Codec not found\n");
        exit(1);
    }

    c = avcodec_alloc_context3(codec);
    if (!c) {
        fprintf(stderr, "Could not allocate video codec context\n");
        exit(1);
    }

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = 352;
    c->height = 288;
    /* frames per second */
    c->time_base = (AVRational){1,25};
    /* emit one intra frame every ten frames
     * check frame pict_type before passing frame
     * to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
     * then gop_size is ignored and the output of encoder
     * will always be I frame irrespective to gop_size
     */
    c->gop_size = 10;
    c->max_b_frames = 1;
    c->pix_fmt = AV_PIX_FMT_YUV420P;

    if (codec_id == AV_CODEC_ID_H264)
        av_opt_set(c->priv_data, "preset", "slow", 0);

    /* open it */
    if (avcodec_open2(c, codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
        exit(1);
    }

    f = fopen(filename, "wb");
    if (!f) {
        fprintf(stderr, "Could not open %s\n", filename);
        exit(1);
    }

    frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Could not allocate video frame\n");
        exit(1);
    }
    frame->format = c->pix_fmt;
    frame->width  = c->width;
    frame->height = c->height;

    /* the image can be allocated by any means and av_image_alloc() is
     * just the most convenient way if av_malloc() is to be used */
    ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height,
                         c->pix_fmt, 32);
    if (ret < 0) {
        fprintf(stderr, "Could not allocate raw picture buffer\n");
        exit(1);
    }

    /* encode 1 second of video */
    for (i = 0; i < 25; i++) {
        av_init_packet(&pkt);
        pkt.data = NULL;    // packet data will be allocated by the encoder
        pkt.size = 0;

        fflush(stdout);
        /* prepare a dummy image */
        /* Y */
        for (y = 0; y < c->height; y++) {
            for (x = 0; x < c->width; x++) {
                frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
            }
        }
        /* Cb and Cr */
        for (y = 0; y < c->height/2; y++) {
            for (x = 0; x < c->width/2; x++) {
                frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
                frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
            }
        }

        frame->pts = i;

        /* encode the image */
        ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
        if (ret < 0) {
            fprintf(stderr, "Error encoding frame\n");
            exit(1);
        }

        if (got_output) {
            printf("Write frame %3d (size=%5d)\n", i, pkt.size);
            fwrite(pkt.data, 1, pkt.size, f);
            av_free_packet(&pkt);
        }
    }

    /* get the delayed frames */
    for (got_output = 1; got_output; i++) {
        fflush(stdout);

        ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
        if (ret < 0) {
            fprintf(stderr, "Error encoding frame\n");
            exit(1);
        }

        if (got_output) {
            printf("Write frame %3d (size=%5d)\n", i, pkt.size);
            fwrite(pkt.data, 1, pkt.size, f);
            av_free_packet(&pkt);
        }
    }

    /* add sequence end code to have a real mpeg file */
    fwrite(endcode, 1, sizeof(endcode), f);
    fclose(f);

    avcodec_close(c);
    av_free(c);
    av_freep(&frame->data[0]);
    av_frame_free(&frame);
    printf("\n");
}
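For reference, below is a minimal driver in the spirit of the original example's main(). It is only a sketch: it assumes video_encode_example() above sits in the same source file, the output name test.h264 is arbitrary, and your FFmpeg 2.8 build must include libx264 for AV_CODEC_ID_H264 to be found.

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>

/* video_encode_example() from above goes here */

int main(void)
{
    /* on FFmpeg 2.8 the codecs must be registered before any lookup */
    avcodec_register_all();

    /* write 25 dummy frames as a raw H.264 stream; the filename is arbitrary */
    video_encode_example("test.h264", AV_CODEC_ID_H264);

    return 0;
}

Link against libavcodec and libavutil (e.g. -lavcodec -lavutil) when building.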

 

If you replace the dummy AVFrame in the code above with an AVFrame captured from a camera, you have camera data being encoded into a raw H.264 stream (see the sketch after the notes below). A few points need attention, though.

  1. The call av_opt_set(c->priv_data, "preset", "slow", 0) in the code selects the encoding speed. Possible values are ultrafast, superfast, veryfast, faster, fast, medium, slow, slower, veryslow and placebo; the faster the preset, the lower the video quality. If you need real-time encoding (i.e. encoding no slower than the input arrives), also add av_opt_set(pOutputVideoCodecCtx->priv_data, "tune", "zerolatency", 0).
  2. The pts of each frame matters a great deal, but it is also simple to handle: once the frame rate (time_base) is set, just keep incrementing the pts.
  3. The final "get the delayed frames" step is also known as "flushing the encoder": the encoder usually buffers some data internally, which has to be drained at the end. The faster the preset from point 1, the fewer packets the encoder buffers, and vice versa.
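To make the camera scenario and the notes above concrete, here is a minimal sketch of the per-frame loop, using the same FFmpeg 2.8 APIs as the example. get_camera_frame() is a hypothetical helper standing in for whatever capture path you use (libavdevice, V4L2, DirectShow, ...), and the camera format AV_PIX_FMT_YUYV422 is only an assumption; substitute whatever your device actually delivers. The encoder context c, the YUV420P frame and the output file f are assumed to have been set up exactly as in video_encode_example() above.

#include <stdint.h>
#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>

/* Hypothetical capture helper: returns the next decoded camera frame
 * (assumed YUYV422 here) or NULL when capture stops. Not part of FFmpeg. */
extern AVFrame *get_camera_frame(void);

/* Note 1: for real-time encoding, set these on the context BEFORE avcodec_open2():
 *     av_opt_set(c->priv_data, "preset", "ultrafast",   0);
 *     av_opt_set(c->priv_data, "tune",   "zerolatency", 0);
 */
static void encode_camera_frames(AVCodecContext *c, AVFrame *frame, FILE *f,
                                 int cam_width, int cam_height)
{
    struct SwsContext *sws;
    AVPacket pkt;
    int ret, got_output;
    int64_t pts = 0;

    /* convert from the (assumed) camera pixel format to the encoder's YUV420P */
    sws = sws_getContext(cam_width, cam_height, AV_PIX_FMT_YUYV422,
                         c->width, c->height, AV_PIX_FMT_YUV420P,
                         SWS_BILINEAR, NULL, NULL, NULL);

    for (;;) {
        AVFrame *cam = get_camera_frame();      /* hypothetical capture call */
        if (!cam)
            break;

        sws_scale(sws, (const uint8_t * const *)cam->data, cam->linesize,
                  0, cam_height, frame->data, frame->linesize);

        /* Note 2: with time_base = 1/25, an incrementing pts per frame is enough */
        frame->pts = pts++;

        av_init_packet(&pkt);
        pkt.data = NULL;   /* let the encoder allocate the packet */
        pkt.size = 0;

        ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
        if (ret < 0)
            break;
        if (got_output) {
            fwrite(pkt.data, 1, pkt.size, f);
            av_free_packet(&pkt);
        }
    }

    sws_freeContext(sws);
    /* Note 3: after the loop, flush the encoder exactly as in the
     * "get the delayed frames" loop of the example above. */
}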