H264 / AAC ES stream to TS stream conversion code

During development I ran into a case where the SDK returns audio/video ES data that has to be packed into a TS stream, forwarded to a multicast group, and then pulled back for playback. Going from ES to TS involves two rounds of packetization (ES to PES, then PES to TS); the details are covered in the following articles:

Packing TS

ES / PES / TS analysis

About ES and TS

The effort of generations, the crystallization of wisdom, the light of human civilization, that ray of light in the darkness: appear, source code!!!

Header file es2ts.h

#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/types.h>
#include <time.h>

#define DATA_TYPE_UNKNOWN 0
#define DATA_TYPE_VIDEO	96
#define DATA_TYPE_AUDIO	98

#define DATA_TYPE_USER	3

#define MAX_SIZE_SAVE_TS 6000*188

#define TS_PKT_LEN	188
#define DEF_TS_PKT_LEN	1316
#define LPCMPACKTLEN	4
#define PCMAUDIOPETPACKETLEN	14
#define PMTLEN	44
#define DATELEN	22
#define DATELEN1	12
#define VIDEOTIMEINTERVAL 3000
#define AUDIOTIMEINTERVAL 1920//1875
#define CACHEVIDEOFREMCNT	15 //number of cached video frames
#define VIDEOFREAMLEN	150



typedef struct
{
	unsigned int 		m_iDatalen;
	int				m_frametype;
	unsigned int		m_uiPartTimeTamp;//timestamp of this audio sub-frame
	unsigned char		m_c48KTwoChnPcmBufPart[8192];
}TAudio48KTwoChnPart;

typedef struct
{
	int 				m_iDatalen;
	int 				m_frametype;
	unsigned int		m_uTimeTamp;
	unsigned char		m_cVideoBuf[VIDEOFREAMLEN * 1024];
}TVideoFream;

typedef struct _es2tsParam
{
	unsigned short pc48KTwoChnPcmBigEndianBuf[4096 * 3];//converted big-endian audio samples
	TAudio48KTwoChnPart Audio48KTwoChnPcmPart[6];//the audio frame split into 6 parts
	TVideoFream VideoFreamCach[CACHEVIDEOFREMCNT];//cached video frames; once CACHEVIDEOFREMCNT frames have accumulated, further frames start to be dropped
	unsigned int videoLastTime ;
	unsigned int audioLastTime ;
	int videoAddTime ;//VIDEOTIMEINTERVAL;
	int audioAddTime ;//AUDIOTIMEINTERVAL;
	double videoFPS ;
	int fpsFlag ;
	int audioFlag ;
	int audioTmp ;
	int nFindPFream ;
	int iNeedAudioFream ;
	int iNeedVideoFream ;
	unsigned int uiVideoCnt ;
	unsigned int uiAudioCnt ;
	unsigned int uiWritVideoCnt ;//number of video frames written into the TS
	int iCachVideoCnt ;//number of cached video frames
	unsigned int iWritAudioCnt ;
	unsigned int iExistAudioFream ;//an audio frame is pending
	unsigned int uiCurWtsTime ; //timestamp of the latest audio/video data written into the TS
	int need_i_frame ;
	int g_iWriteTsPCMFileFd ;
}es2tsParam;
typedef struct
{
    int maxPacketSize;
    int encid;
    int encrypt;
    int ts_id;
    int prog_num;
    int pmt_pid;
    int pcr_pid;
    int vid_pid;
    int aud_pid;
    int cbr;
    int cbr_pkt_rate;
    int pcr_interval;
    int cbr_bit;
    int vbitrate;
    int fps;
} TSWRITER_CONFIG;

typedef struct
{
    TSWRITER_CONFIG cfg;

    int video_audio;
    int pat_present;
    int pat_cnt;
    int pmt_cnt;
    int pcr_cnt;
    int *pcr_cnt_ptr;
    int vid_cnt;
    int aud_cnt;
    int spsFound;
    int samplingFrequencyIndex;
    int channelConfiguration;

    int first_pcr;
    int avg_count;
    int pkt_count;
    unsigned int pcr;
    unsigned int pcr_high;
    int accum_pkt;	// number of packets accum
    int frame_pkt;

} TSWRITER_HANDLE;

unsigned int sync_timestamp_a_to_v(unsigned timestamp, int samplingFrequencyIndex);
int store_ts_from_es(unsigned char *outPut,unsigned int outPtrLen, unsigned char *esbuf, int boxSize, unsigned int sample_type, unsigned int timestamp, int samplingFrequencyIndex = 0, int channelConfiguration = 0);
int sampling_frequeny_change(long fre);
int init_e2t_param(es2tsParam * e2tPtr);
int m_es2ts(es2tsParam * e2tPtr ,unsigned char *data,unsigned int dataLen,unsigned int frameType, unsigned int timestamp,unsigned char *outPut,unsigned int outPutLen);
void InitSocket(int *UdpSocket, int iPort,char * ethName,char * MulticIp);
int send_ts_stream(int socket,unsigned char * sendPtr,int sendLen);
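
Before the implementation, here is a minimal sketch of how these entry points are meant to be wired together. It is an illustration only: the interface name, multicast address, port and the get_next_frame() callback are assumptions standing in for the SDK, not part of the code below.

/* Usage sketch; assumes es2ts.h is included. eth0, 239.1.1.1, port 1234 and
   get_next_frame() are placeholders for the SDK side, not original code. */
extern int get_next_frame(unsigned char **frame, unsigned int *len,
                          unsigned int *type, unsigned int *timestamp);

static unsigned char ts_out[MAX_SIZE_SAVE_TS];
static es2tsParam e2t_param;	//well over 2 MB, keep it off the stack

void es_to_ts_loop(void)
{
	int sock = -1;
	unsigned char *frame;
	unsigned int frame_len, frame_type, timestamp;

	InitSocket(&sock, 1234, (char *)"eth0", (char *)"239.1.1.1");
	if (sock < 0)
		return;
	init_e2t_param(&e2t_param);

	//frame_type follows Stream_Frame_type: 4 = IDR, 6 = P, 11 = G711
	while (get_next_frame(&frame, &frame_len, &frame_type, &timestamp) == 0)
	{
		int ts_len = m_es2ts(&e2t_param, frame, frame_len, frame_type,
		                     timestamp, ts_out, sizeof(ts_out));
		if (ts_len > 0)
			send_ts_stream(sock, ts_out, ts_len);
	}
}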

cpp file

#include "es2ts.h"
#include "g711.h"
#include <net/if.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <sys/time.h>

typedef unsigned int DWORD32, *PDWORD32;
typedef unsigned long       DWORD;
typedef unsigned char       BYTE;

typedef enum 
{
    STREAM_FRAME_RAW_AUDIO = 3,
    STREAM_FRAME_H264_IDR,
    STREAM_FRAME_H264_I,
    STREAM_FRAME_H264_P,
    STREAM_FRAME_H264_B,
    STREAM_FRAME_H264_SPS,
    STREAM_FRAME_H264_PPS,
    STREAM_FRAME_JPEG,
    STREAM_FRAME_G711,
    STREAM_FRAME_AAC,
    STREAM_FRAME_MPEG4_I,
    STREAM_FRAME_MPEG4_P,
    STREAM_FRAME_MPEG4_B,
    STREAM_FRAME_MPEG4_VOL,
    STREAM_FRAME_MOTION_VALUES = 18,
    STREAM_FRAME_RAW_VIDEO,
    STREAM_FRAME_H264_SVC_SEI,
    STREAM_FRAME_H264_SVC_PREFIX,
    STREAM_FRAME_H264_SVC_SUBSET_SPS,
    STREAM_FRAME_H264_SVC_SLICE_SCALABLE,
    STREAM_FRAME_MPEG2_I,
    STREAM_FRAME_MPEG2_P,
    STREAM_FRAME_MPEG2_B,
    STREAM_FRAME_CMD_RESPONSE,
    STREAM_FRAME_JPEG_SNAPSHOT,
    STREAM_FRAME_APP_ANALYTIC = 90,
    STREAM_FRAME_VIDEO_ENCODED_PRIMARY = 100,
    STREAM_FRAME_VIDEO_ENCODED_SECONDARY,
    STREAM_FRAME_AUDIO_ENCODED,
    STREAM_FRAME_ANALYTIC,
    STREAM_FRAME_RAW_VIDEO_SECONDARY,
    STREAM_FRAME_VIDEO_ENCODED,
    STREAM_FRAME_RTP_PACKAGE_H264 = 150,
    STREAM_FRAME_RTP_PACKAGE_MPEG4V,
    STREAM_FRAME_RTP_PACKAGE_MJPEG,
    STREAM_FRAME_RTP_PACKAGE_MP2T,
    STREAM_FRAME_RTP_PACKAGE_G711A = 180,
    STREAM_FRAME_RTP_PACKAGE_G711U,
    STREAM_FRAME_RTP_PACKAGE_L16,
    STREAM_FRAME_RTP_PACKAGE_AAC,
    STREAM_FRAME_RTP_PACKAGE_G726,
    STREAM_FRAME_RTP_PACKAGE_MPA,
    STREAM_FRAME_TS_PACKAGE,
    STREAM_FRAME_DOWNLOAD_PROGRESS_INDICATOR = 254,       // download progress indicator
    STREAM_FRAME_INVALID = 255
}Stream_Frame_type;
#define DBG_MSG_ON 0



static unsigned char pat_pkt[] = {
  0x47, 0x40, 0x00, 0x10, 0x00, 0x00, 0xB0, 0x0D, 0x59, 0x81, 0xEB, 0x00, 0x00, 0x00, 0x01, 0xE0,
  0x42, 0x5E, 0x44, 0x05, 0x9A, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF};

static unsigned char pmt_pkt_va[] = {
  0x47, 0x40, 0x42, 0x10, 0x00, 0x02, 0xb0, 0x2d, 0x00, 0x01, 0xd5, 0x00, 0x00, 0xe0, 0x44, 0xf0,
  0x0c, 0x05, 0x04, 0x48, 0x44, 0x4d, 0x56, 0x88, 0x04, 0x0f, 0xff, 0xfc, 0xfc, 0x1b, 0xe0, 0x44,
  0xf0, 0x00, 0x80, 0xe0, 0x45, 0xf0, 0x0a, 0x05, 0x08, 0x48, 0x44, 0x4d, 0x56, 0x00, 0x80, 0x61,
  0x40, 0x8c, 0xd6, 0x37, 0xbe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff };
  

static unsigned char pmt_pkt_v[] = {
  0x47, 0x40, 0x42, 0x10, 0x00, 0x02, 0xB0, 0x12, 0x00, 0x01, 0xD5, 0x00, 0x00, 0xE0, 0x44, 0xF0,
  0x00, 0x1B, 0xE0, 0x44, 0xF0, 0x00, 0xFE, 0x8A, 0x98, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
  0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF };

static int sampling_frequeny[] = {96000, 88200, 64000, 48000,
								44100, 32000, 24000, 22050,
								16000, 12000, 11025,  8000}; /* AAC samplingFrequencyIndex -> Hz */

static DWORD32 crc32_table[256];
static unsigned char * ptsbuf = NULL;

unsigned int sync_timestamp_a_to_v(unsigned timestamp, int samplingFrequencyIndex){
	double tmp = 0;

	if(samplingFrequencyIndex <= 0xb)
		samplingFrequencyIndex = sampling_frequeny[samplingFrequencyIndex];
	tmp = 90 * ( ((double)timestamp) / (((double)samplingFrequencyIndex) / 1000));

	return (unsigned int)tmp;
}
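
/* For reference (not part of the original code): if the audio timestamp is
   counted in samples, then with samplingFrequencyIndex 3 (48000 Hz):
       sync_timestamp_a_to_v(48000, 3) = 90 * 48000 / 48 = 90000
   i.e. one second of audio maps to 90000 ticks of the 90 kHz MPEG clock. */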

int isIFrame(unsigned char frametype)
{
	if(frametype == 4 || frametype == 5)
		return 1;
	return 0;
}

int DateType(int frametype)
{
	switch(frametype)
	{
		case 4:
		case 5:
		case 6:
		case 7:	
			return DATA_TYPE_VIDEO;
		case 11:
			return DATA_TYPE_AUDIO;
		default:
			printf("frame_type unknown:%d\n", frametype);
			return DATA_TYPE_UNKNOWN;
	}
}

static void init_crc32_table()
{
        DWORD32 i, j, k;

        for( i = 0; i < 256; i++ )
        {
            k = 0;
            for( j = (i << 24) | 0x800000; j != 0x80000000; j <<= 1 )
                k = (k << 1) ^ (((k ^ j) & 0x80000000) ? 0x04c11db7 : 0);

            crc32_table[i] = k;
        }
}

DWORD32 crc32(DWORD32 _crc, const BYTE *buf, DWORD32 len)
{
	DWORD32 i_crc = _crc ^ 0xffffffff;
	unsigned int i;
	static int init=0;
	if (!init) {
		init_crc32_table();
		init = 1;
	}
	for (i = 0; i < len; i++)
		i_crc = (i_crc << 8) ^ crc32_table[((i_crc >> 24) ^ buf[i]) & 0xff];

	return i_crc;
}
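
/* Illustration only (not called by the code below): crc32() above, called
   with _crc = 0, is the big-endian MPEG-2 PSI CRC-32 (polynomial 0x04C11DB7,
   initial value 0xFFFFFFFF, no final XOR). The PAT/PMT code appends it after
   the section like this sketch; sec points at table_id (offset 5 in the TS
   packet: 4-byte TS header plus pointer_field) and sec_len is the number of
   section bytes covered by the CRC. */
static void append_psi_crc(unsigned char *sec, int sec_len)
{
	DWORD32 c = crc32(0, sec, sec_len);
	sec[sec_len + 0] = (c >> 24) & 0xFF;
	sec[sec_len + 1] = (c >> 16) & 0xFF;
	sec[sec_len + 2] = (c >>  8) & 0xFF;
	sec[sec_len + 3] =  c        & 0xFF;
}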


TSWRITER_HANDLE *tsWriterCreate()
{
		TSWRITER_HANDLE *h;
	    if(!(h = (TSWRITER_HANDLE *)malloc(sizeof(TSWRITER_HANDLE))))
	        return NULL;

	    h->video_audio = 1;
	    h->pat_present = 0;
	    h->pat_cnt = 0;
	    h->pmt_cnt = 0;
	    h->pcr_cnt = 0;
	    h->vid_cnt = 0x0F;
	    h->aud_cnt = 0;

    // mark sps not found yet
	    h->spsFound = 0;

    // mark header as not found
	    h->samplingFrequencyIndex = -1;
	    h->channelConfiguration = -1;

    // bitrate count
	    h->first_pcr = 1;
	    h->avg_count = 0;
	    h->pkt_count = 0;
	    h->accum_pkt = 0;
	    h->frame_pkt = 0;

		ptsbuf = (ptsbuf) ? ptsbuf : (unsigned char*)malloc(MAX_SIZE_SAVE_TS);	

		h->cfg.maxPacketSize = DEF_TS_PKT_LEN;
		h->cfg.encid = 0;
		h->cfg.encrypt = 0;
		h->cfg.ts_id = 0x5981;
		h->cfg.prog_num = 1;
		h->cfg.pmt_pid = 0x42;
		h->cfg.pcr_pid = 0;
		h->cfg.vid_pid = 0x44;
		h->cfg.aud_pid = 0x45;
		h->cfg.cbr = 0;
		h->cfg.cbr_pkt_rate = 99;//0
		h->cfg.pcr_interval = 3003;//0
		h->cfg.cbr_bit = 0; 
		h->cfg.vbitrate = 3000000;//0
		h->cfg.fps = 30;//0

		if (h->cfg.pcr_pid == 0 || !h->cfg.cbr) {
    		h->cfg.pcr_pid = h->cfg.vid_pid;
	    		h->pcr_cnt_ptr = &h->vid_cnt;
	    } else {
	        h->pcr_cnt_ptr = &h->pcr_cnt;
	    }
	    if (h->cfg.cbr) {
	        h->avg_count = h->cfg.cbr_pkt_rate;
		    h->pkt_count = h->avg_count - 2;
		}
		return h;
}
int sampling_frequeny_change(long fre){
	switch(fre){
		case 96000:
			return 0x0;
		case 88200:
			return 0x1;
		case 64000:
			return 0x2;
		case 48000:
			return 0x3;
		case 44100:
			return 0x4;
		case 32000:
			return 0x5;
		case 24000:
			return 0x6;
		case 22050:
			return 0x7;
		case 16000:
			return 0x8;
		case 12000:
			return 0x9;
		case 11025:
			return 0xa;
		case 8000:
			return 0xb;
		default:
			return 0xc;
	}
}


int store_ts_from_es(unsigned char *outPut,unsigned int outPtrLen, unsigned char *esbuf, int boxSize, unsigned int frametype, unsigned int timestamp, int samplingFrequencyIndex, int channelConfiguration)
{
	if(!outPut)
		return -1; //ERR:no file...

	unsigned int sampleSize;
	unsigned int cts;
	unsigned int sample_type,sample_flags = 0;
	unsigned char *cur, *tsptr, *tspkt;
	int first;	// first piece of frame
	int payload_size, adaptsize;
	
	static unsigned cts_high = 0x0;
	static unsigned last_timestamp = 0;

	sampleSize = boxSize;//frame size in bytes

	sample_type = DateType(frametype);
	sample_flags |= isIFrame(frametype);
	cts = timestamp;
	//if(cts == last_timestamp)
	//	cts++;

	//samplingFrequencyIndex = (samplingFrequencyIndex>=0x10)?sampling_frequeny_change(samplingFrequencyIndex):samplingFrequencyIndex;
	if(timestamp < last_timestamp && timestamp - last_timestamp <= 100)
		cts_high ^=  0x1;
	last_timestamp = timestamp;

	cur = esbuf;
	
    // malloc a handle
	static TSWRITER_HANDLE *h = NULL;
	if(h == NULL){
		if((h = tsWriterCreate()) == NULL)
			return -2;
	}
	h->video_audio = 1;
	tsptr = ptsbuf;

	if (sample_type == DATA_TYPE_VIDEO) {
        
		if (h->cfg.cbr == 1 || (sample_flags & 1)) 
		{
			unsigned int c;

			//start assembling the PAT packet
			memcpy(tsptr, pat_pkt, 188);

			tsptr[3] = (tsptr[3] & 0xF0) | h->pat_cnt;
            h->pat_cnt = (h->pat_cnt + 1) & 0x0F;
			tsptr[8] = h->cfg.ts_id >> 8;
			tsptr[9] = h->cfg.ts_id & 0xFF;

			//fill in the program number
			tsptr[13] = h->cfg.prog_num >> 8;
			tsptr[14] = h->cfg.prog_num & 0xFF;

			//PMT ID
			tsptr[15] = 0xE0 | (h->cfg.pmt_pid >> 8);
			tsptr[16] = h->cfg.pmt_pid & 0xFF; 

			//fill in the CRC
			c = crc32(0, tsptr+5, 12);
			tsptr[17] = c >> 24;
			tsptr[18] = (c >> 16) & 0xFF;
			tsptr[19] = (c >> 8) & 0xFF;
			tsptr[20] = c & 0xFF; //4-byte CRC_32
			tsptr += 188; //PAT packet done
			h->accum_pkt ++;
			h->pkt_count ++;

			if (h->cfg.cbr == 1 && h->pkt_count >= h->avg_count) 
			{
				unsigned int new_pcr;

				*tsptr++ = 0x47;
				*tsptr++ = h->cfg.pcr_pid>>8;
				*tsptr++ = h->cfg.pcr_pid&0xFF;
				*tsptr++ = 0x20|*h->pcr_cnt_ptr;
				*tsptr++ = 0xB7;
				*tsptr++ = 0x10;	// PCR
				*tsptr++ = (h->pcr_high<<7)|(h->pcr>>25);
				*tsptr++ = (h->pcr>>17)&0xFF;
				*tsptr++ = (h->pcr>>9)&0xFF;
				*tsptr++ = (h->pcr>>1)&0xFF;
				*tsptr++ = 0x7E;
				*tsptr++ = 0x00;
				memset(tsptr, 0xFF, 188-12);
				tsptr += 188-12;
				h->pkt_count = 0;

				new_pcr = h->pcr + h->cfg.pcr_interval;
				if (new_pcr < h->pcr)
					h->pcr_high = h->pcr_high ^ 1;
				h->pcr = new_pcr;
			}

			//start assembling the PMT packet
			memcpy(tsptr, h->video_audio ? pmt_pkt_va : pmt_pkt_v, 188);
			tsptr[1] = 0x40 | (h->cfg.pmt_pid >> 8);
			tsptr[2] = h->cfg.pmt_pid & 0xFF;
			tsptr[3] = (tsptr[3] & 0xF0) | h->pmt_cnt;
			h->pmt_cnt = (h->pmt_cnt + 1) & 0x0F;

			if (h->video_audio)
			{
				//program number, matching the PAT
				tsptr[8] = h->cfg.prog_num >> 8;
				tsptr[9] = h->cfg.prog_num & 0xFF; 

				//PCR ID
				tsptr[13] = 0xE0 | (h->cfg.pcr_pid >> 8);
				tsptr[14] = h->cfg.pcr_pid & 0xFF;

				//video ID
				tsptr[18 + DATELEN1] = 0xE0 | (h->cfg.vid_pid >> 8); 
				tsptr[19 + DATELEN1] = h->cfg.vid_pid & 0xFF;

				//audio ID
				tsptr[23 + DATELEN1] = 0xE0 | (h->cfg.aud_pid >> 8);
				tsptr[24 + DATELEN1] = h->cfg.aud_pid & 0xFF;
				
				//fill in the CRC
				c = crc32(0, tsptr + 5, PMTLEN); 
				tsptr[27 + DATELEN] = c >> 24;
				tsptr[28 + DATELEN] = (c >> 16) & 0xFF;
				tsptr[29 + DATELEN] = (c >> 8) & 0xFF;
				tsptr[30 + DATELEN] = c & 0xFF;
			}
			else
			{
				tsptr[8] = h->cfg.prog_num >> 8;
				tsptr[9] = h->cfg.prog_num & 0xFF;
				tsptr[13] = 0xE0 | (h->cfg.pcr_pid >> 8);
				tsptr[14] = h->cfg.pcr_pid & 0xFF;
				tsptr[18] = 0xE0 | (h->cfg.vid_pid >> 8);
				tsptr[19] = h->cfg.vid_pid & 0xFF;
				c = crc32(0, tsptr+5, 17);
				tsptr[22] = c >> 24;
				tsptr[23] = (c >> 16) & 0xFF;
				tsptr[24] = (c >> 8) & 0xFF;
				tsptr[25] = c & 0xFF;
			}
			h->pat_present = 1;
			tsptr += 188;

			h->accum_pkt += 1;
			h->pkt_count += 1;
		}

		/* PCR */
		if (h->cfg.cbr == 1)
		{
			if (h->first_pcr)
			{
				h->pcr_high = cts_high ^ 1;
				h->pcr = cts - h->cfg.pcr_interval * 4;
				h->first_pcr = 0;
			}
		}

		/* Count NALs */
		/*
		num_nal = 0;
		size = sampleSize;
		while (size >= 4) {
			nalsize = (cur[0]<<24)|(cur[1]<<16)|(cur[2]<<8)|cur[3];
			nalsize_tab[num_nal++] = nalsize;
			cur += 4 + nalsize;
			size -= 4 + nalsize;
		}
		sampleSize -= size;*/
		cur = esbuf;
		first = 1;

		while (sampleSize > 0)
		{
			if (h->cfg.cbr == 1 && h->pkt_count >= h->avg_count)
			{
				unsigned int new_pcr;

				*tsptr++ = 0x47;
				*tsptr++ = h->cfg.pcr_pid>>8;
				*tsptr++ = h->cfg.pcr_pid&0xFF;
				*tsptr++ = 0x20|*h->pcr_cnt_ptr;
				*tsptr++ = 0xB7;
				*tsptr++ = 0x10;	// PCR
				*tsptr++ = (h->pcr_high<<7)|(h->pcr>>25);
				*tsptr++ = (h->pcr>>17)&0xFF;
				*tsptr++ = (h->pcr>>9)&0xFF;
				*tsptr++ = (h->pcr>>1)&0xFF;
				*tsptr++ = 0x7E;
				*tsptr++ = 0x00;
				memset(tsptr, 0xFF, 188-12);
				tsptr += 188-12;
				h->pkt_count = 0;

				new_pcr = h->pcr + h->cfg.pcr_interval;
				if (new_pcr < h->pcr)
					h->pcr_high = h->pcr_high ^ 1;
				h->pcr = new_pcr;
			}

			tspkt = tsptr;
			payload_size = sampleSize;
			
			if (first && h->cfg.cbr != 1) 
			{
				adaptsize = 8;		// PCR
				payload_size += 19;	// PTS
			}
			else
			{
				adaptsize = 0;
			}
			
			if (4 + adaptsize + payload_size >= TS_PKT_LEN)
			{
				payload_size = TS_PKT_LEN - 4 - adaptsize;	// shrink payload
			}
			else
			{
				adaptsize = TS_PKT_LEN - 4 - payload_size;	// padding
			}

			//build the 4-byte TS header
			*tsptr++ = 0x47;
			*tsptr++ = first ? (0x40|(h->cfg.vid_pid>>8)) : (h->cfg.vid_pid>>8);
			*tsptr++ = h->cfg.vid_pid&0xFF;
			h->vid_cnt = (h->vid_cnt+1)&0x0F;
			*tsptr++ = (adaptsize > 0 ? 0x30 : 0x10)|h->vid_cnt;

			if (adaptsize > 0)
			{
				*tsptr++ = adaptsize - 1;

				//build the adaptation field
				if (first && h->cfg.cbr != 1)
				{
					*tsptr++ = 0x10;	// PCR
					*tsptr++ = (cts_high<<7)|(cts>>25);
					*tsptr++ = (cts>>17)&0xFF;
					*tsptr++ = (cts>>9)&0xFF;
					*tsptr++ = (cts>>1)&0xFF;
					*tsptr++ = 0x7E;
					*tsptr++ = 0x00;

					adaptsize -= 8;
				}
				else
				{	// just padding
					adaptsize--;
					if (adaptsize > 0)
					{
						*tsptr++ = 0x00;
						adaptsize--;
					}
				}
    			while (adaptsize-- > 0)
    				*tsptr++ = 0xFF;
			}

			//at the start of the frame, build the PES header
			if (first) 
			{
				unsigned int pts, pts_high;
				pts = cts + h->cfg.pcr_interval*4;
				//*ppts = pts;

				if (pts < cts)
					pts_high = cts_high ^ 1;
				else
					pts_high = cts_high;

				//build the PES header
				*tsptr++ = 0x00;
				*tsptr++ = 0x00;
				*tsptr++ = 0x01; //PES start code prefix
				*tsptr++ = 0xE0; //stream ID (video)
				*tsptr++ = 0;
				*tsptr++ = 0; //PES packet length for the whole frame; may be left as 0 for video
				*tsptr++ = 0x80;
				*tsptr++ = 0xC0; //PTS and DTS present
				*tsptr++ = 0x0A; //PES header data length (10 bytes of PTS+DTS follow)

				// PTS
				*tsptr++ = 0x30|(pts_high<<3)|((pts>>29)&0x06)|1;
				*tsptr++ = (pts>>22);
				*tsptr++ = (pts>>14)|1;
				*tsptr++ = (pts>>7);
				*tsptr++ = (pts<<1)|1;

				// DTS
				*tsptr++ = 0x10|(pts_high<<3)|((pts>>29)&0x06)|1;
				*tsptr++ = (pts>>22);
				*tsptr++ = (pts>>14)|1;
				*tsptr++ = (pts>>7);
				*tsptr++ = (pts<<1)|1;
				payload_size -= 19;
			}

			if (tsptr - tspkt + payload_size != TS_PKT_LEN)
				printf("Error: payload_size = %d\n", payload_size);

			//printf("aa adaptsize = %d, payload_size = %d, tsptr = %p, h->cfg.vid_pid = 0x%x, sampleSize = %d\n", adaptsize, payload_size, tsptr, h->cfg.vid_pid, sampleSize);
			memcpy(tsptr, cur, payload_size);
			/*
			// write NAL start code
			while (curnal < num_nal) {
				while (scindex < 4 && scoff < payload_size) {
					tsptr[scoff] = (scindex == 3);
					scindex++;
					scoff++;
				}
				if (scoff >= payload_size) {
					scoff -= payload_size;
					break;
				}
				scoff += nalsize_tab[curnal];
				curnal++;
				scindex = 0;
			}*/
			tsptr += payload_size;
			cur += payload_size;
			sampleSize -= payload_size;
			first = 0;

			if (h->cfg.cbr == 1)
			{
				h->pkt_count++;
				h->accum_pkt++;
				h->frame_pkt++;
			}
		}

		if (h->cfg.cbr == 1)
		{
			while (h->accum_pkt < h->avg_count)
			{
				*tsptr++ = 0x47;
				*tsptr++ = h->cfg.vid_pid>>8;
				*tsptr++ = h->cfg.vid_pid&0xFF;
				*tsptr++ = 0x20|h->vid_cnt;
				*tsptr++ = 0x83;
				*tsptr++ = 0x00;
				memset(tsptr, 0xFF, 188-6);
				tsptr += 188-6;
				h->accum_pkt++;
				h->pkt_count++;
			}
			h->accum_pkt -= h->avg_count;
			h->frame_pkt = 0;
		}

		if (h->cfg.cbr == 1 && h->pkt_count >= h->avg_count) 
		{
			unsigned int new_pcr;

			*tsptr++ = 0x47;
			*tsptr++ = h->cfg.pcr_pid>>8;
			*tsptr++ = h->cfg.pcr_pid&0xFF;
			*tsptr++ = 0x20|*h->pcr_cnt_ptr;
			*tsptr++ = 0xB7;
			*tsptr++ = 0x10;	// PCR
			*tsptr++ = (h->pcr_high<<7)|(h->pcr>>25);
			*tsptr++ = (h->pcr>>17)&0xFF;
			*tsptr++ = (h->pcr>>9)&0xFF;
			*tsptr++ = (h->pcr>>1)&0xFF;
			*tsptr++ = 0x7E;
			*tsptr++ = 0x00;
			memset(tsptr, 0xFF, 188-12);
			tsptr += 188-12;
			h->pkt_count = 0;

			new_pcr = h->pcr + h->cfg.pcr_interval;
			if (new_pcr < h->pcr)
				h->pcr_high = h->pcr_high ^ 1;
			h->pcr = new_pcr;
		}
	} 
	else if (sample_type == DATA_TYPE_AUDIO){ 		
		if (!h->pat_present)
		{
			return -3;
		}
		
		int frameSize = 0;
		int bitsPerSample = -1; //bits per sample
		int m_channelConfiguration = -1;  //channel configuration
		int m_samplingFrequencyIndex = -1;//sampling frequency index

		//hard-coded sample rate, channel count and bit depth
		bitsPerSample = 1; //16 bit
		m_channelConfiguration = 3;  //stereo
		m_samplingFrequencyIndex = 1;//sample rate, 1 == 48 kHz
		
		if ((-1 == m_samplingFrequencyIndex) || (-1 == m_channelConfiguration) || (-1 == bitsPerSample)) 
		{
			printf("No sampleFreq or channelConfiguration or bitsPerSample\n");
			return -4;
		}

		first = 1;
		frameSize = sampleSize + LPCMPACKTLEN;// add LPCM HDMV Header(4 bytes)

		while (sampleSize > 0) 
		{
			payload_size = sampleSize;
			
			if (first) //first TS packet of this audio frame
			{
				payload_size += (PCMAUDIOPETPACKETLEN + LPCMPACKTLEN);
			}

			if (4 + payload_size >= TS_PKT_LEN)
			{
				payload_size = TS_PKT_LEN - 4;
			}

			adaptsize = TS_PKT_LEN - 4 - payload_size;
			*tsptr++ = 0x47;
			*tsptr++ = first ? (0x40|(h->cfg.aud_pid >> 8)) : (h->cfg.aud_pid>>8);
			*tsptr++ = h->cfg.aud_pid & 0xFF;
			*tsptr++ = (adaptsize > 0 ? 0x30 : 0x10) | h->aud_cnt;
			h->aud_cnt = (h->aud_cnt+1)&0x0F;

			if (adaptsize > 0) 
			{
				adaptsize--;
				*tsptr++ = adaptsize;

				if (adaptsize > 0)
				{
					if(adaptsize >= 7 && first == 1)
					{
						*tsptr++ = 0x10;
						adaptsize--;
						// PCR info
						*tsptr++ = (cts_high <<7 ) | (cts >> 25);
						*tsptr++ = (cts >> 17) & 0xFF;
						*tsptr++ = (cts >> 9) & 0xFF;
						*tsptr++ = (cts >> 1) & 0xFF;
						*tsptr++ = 0x7E;
						*tsptr++ = 0x00;
						adaptsize -= 6;
					}
					else
					{
						*tsptr++ = 0;
						adaptsize--;
					}
					while (adaptsize-- > 0)
						*tsptr++ = 0xFF;
				}
			}

			if (first)
			{
				unsigned int pts, pts_high;
				pts = cts + h->cfg.pcr_interval*4;
				//*ppts = pts;
				if (pts < cts)
					pts_high = cts_high ^ 1;
				else
					pts_high = cts_high;

				//PES
				*tsptr++ = 0x00;
				*tsptr++ = 0x00;
				*tsptr++ = 0x01; //PES start code prefix
				*tsptr++ = 0xC0; //stream ID (audio)
				*tsptr++ = ((frameSize + 8) >> 8)&0xFF;
				*tsptr++ = (frameSize + 8)&0xFF; //PES packet length
				*tsptr++ = 0x80; //fixed value required by the spec
				*tsptr++ = 0x80; //PTS only
				*tsptr++ = 0x05; //PES header data length (5 bytes of PTS follow)

				// PTS
				*tsptr++ = 0x20|(pts_high<<3)|((pts>>29)&0x06)|1;
				*tsptr++ = (pts>>22);
				*tsptr++ = (pts>>14)|1;
				*tsptr++ = (pts>>7);
				*tsptr++ = (pts<<1)|1;
				
				// LPCM HDMV Header//bitsPerSample//channel//samplingFrequencyIndex
				frameSize -= LPCMPACKTLEN;
				*tsptr++ = (frameSize >>8 )&0xff;
				*tsptr++ =  frameSize&0xff;
				*tsptr++ =(m_channelConfiguration&0x0f) << 4|(m_samplingFrequencyIndex&0x0f);
				*tsptr++ = ((bitsPerSample&0x3) << 6) & 0xc0;

				payload_size -= (PCMAUDIOPETPACKETLEN + LPCMPACKTLEN);
			}

			memcpy(tsptr, cur, payload_size);
			tsptr += payload_size;
			cur += payload_size;
			sampleSize -= payload_size;
			first = 0;
		}

	} 
	else
		return -5;

	//return 1;
	int ret = tsptr - ptsbuf;
	if(outPtrLen < ret)
	{
		return -6;
	}
	memcpy(outPut,ptsbuf,ret);
	/*int ret = 0; 
	ret	= fwrite(ptsbuf, 1, tsptr - ptsbuf, file);
	fflush(file);*/	
	return ret;
} 
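
/* Illustration only (not called above): the 5-byte PES PTS/DTS field written
   in store_ts_from_es follows this standard bit layout. prefix is 0x30 for a
   PTS that is followed by a DTS, 0x10 for the DTS, 0x20 for a lone PTS;
   ts_high carries bit 32 of the 33-bit timestamp. */
static void pack_pes_timestamp(unsigned char *p, unsigned int prefix,
                               unsigned int ts_high, unsigned int ts)
{
	p[0] = prefix | (ts_high << 3) | ((ts >> 29) & 0x06) | 1;	//'00xx' + TS[32:30] + marker
	p[1] = (ts >> 22) & 0xFF;					//TS[29:22]
	p[2] = ((ts >> 14) & 0xFE) | 1;					//TS[21:15] + marker
	p[3] = (ts >> 7) & 0xFF;					//TS[14:7]
	p[4] = ((ts << 1) & 0xFE) | 1;					//TS[6:0] + marker
}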

static void G711ChangeToPcm(unsigned char *pcG711Buf, short *pcPcmBuf, int iDateLen)
{
	g711_t *g711 = NULL;
	g711_config_t config;
	config.law = G711_ALAW;
	config.num_channels = 1;
	config.channel_select = 0;
	
	g711_open(&config, &g711);
	g711_decode(g711, iDateLen, pcG711Buf, pcPcmBuf);
	g711_close(g711);
}


/*********************************
Purpose: resample the audio; the source sample rate is 8 kHz
Parameters:
		iFrequency = 1--> 48000
		iFrequency = 4--> 96000
		iFrequency = 5--> 192000
**********************************/
static void ReSampFrequency(unsigned char *pcPcmBuf, unsigned char *pcReSampPcmBuf, int iDateLen, int iFrequency)
{
	int m = 0;
	int n = 0;
	
	if (1 == iFrequency)//8 kHz to 48 kHz (sample repetition)
	{
		unsigned char *cur_p48 = pcPcmBuf;
		
		for (n = 0; n < iDateLen; n += 2)
		{
			for (m = 0; m < 12; m += 2)
			{
				memcpy(pcReSampPcmBuf + m + n * 6, cur_p48 + n, 2);
			}
		}
	}
}


//mono to stereo
static void PcmSingleChnToDoubleChn(unsigned char *pc48KPcmBuf, unsigned char *pc48KTwoChnPcmBuf)
{
	int i = 0;
	unsigned char *cur_p = pc48KPcmBuf;
	
	while (((i + 3) < (4096 * 6)) && cur_p)
	{
		pc48KTwoChnPcmBuf[i + 0] = *cur_p;
		pc48KTwoChnPcmBuf[i + 2] = *cur_p;
		cur_p++;
		pc48KTwoChnPcmBuf[i + 1] = *cur_p;
		pc48KTwoChnPcmBuf[i + 3] = *cur_p;
		cur_p++;
		i += 4;
	}
}

//returns the length of the produced audio data; pbs: audio data pointer, len: original audio data length
static int Dealwith8KSigChnG711Audio(unsigned char *pbs, unsigned int len, unsigned short *pc48KTwoChnPcmBigEndianBuf, 
                                     TAudio48KTwoChnPart *Audio48KTwoChnPcmPart, unsigned int *uiAudioCnt, int iFlag,int audioAddTime)
{
	int i = 0;
	short pcPcmBuf[4096] = {0}; //decoded PCM audio
	unsigned char pc48KSigChnPcmBuf[4096 * 6] = {0};//48 kHz mono PCM audio
	unsigned char pc48KTwoChnPcmBuf[8192 * 6] = {0};//48 kHz stereo PCM audio
	

	// g711A to Pcm
	memset(pcPcmBuf, 0, sizeof(pcPcmBuf));
	G711ChangeToPcm(pbs, pcPcmBuf, len);
	len = 2 * len; // data * 2


	//8K to 48K frequency
	memset(pc48KSigChnPcmBuf, 0, sizeof(pc48KSigChnPcmBuf));
	ReSampFrequency((unsigned char *)pcPcmBuf, pc48KSigChnPcmBuf, len, 1);
	len = len * 6;//data * 6

	//Pcm single chn to double chn
	memset(pc48KTwoChnPcmBuf, 0, sizeof(pc48KTwoChnPcmBuf));
	PcmSingleChnToDoubleChn(pc48KSigChnPcmBuf, pc48KTwoChnPcmBuf);
	len = len * 2;//data * 2


	if (len > 4096 * 3 * sizeof(unsigned short))	//clamp to the big-endian buffer size
		len = 4096 * 3 * sizeof(unsigned short);
	memcpy(pc48KTwoChnPcmBigEndianBuf, pc48KTwoChnPcmBuf, len);
	for(i = 0; i < (4096 * 3); i++)
	{
		pc48KTwoChnPcmBigEndianBuf[i] = ((pc48KTwoChnPcmBigEndianBuf[i] & 0x00ff) << 8) | ((pc48KTwoChnPcmBigEndianBuf[i] & 0xff00) >> 8);
	}
	
	memset(Audio48KTwoChnPcmPart, 0, 6 * sizeof(TAudio48KTwoChnPart));
	for (i = 0; i < 6; i++) //split the converted audio into 6 sub-frames
	{
		Audio48KTwoChnPcmPart[i].m_frametype = 11;
		Audio48KTwoChnPcmPart[i].m_iDatalen = len / 6;
		Audio48KTwoChnPcmPart[i].m_uiPartTimeTamp = i * audioAddTime + (*uiAudioCnt - iFlag) * audioAddTime * 6;
		memcpy(Audio48KTwoChnPcmPart[i].m_c48KTwoChnPcmBufPart, (unsigned char *)pc48KTwoChnPcmBigEndianBuf + i * len / 6, Audio48KTwoChnPcmPart[i].m_iDatalen);
		//printf("--------Audio48KTwoChnPcmPart:%d,ts:%u\n",i,Audio48KTwoChnPcmPart[i].m_uiPartTimeTamp);
	}

	if (0 == iFlag)
	{
		(*uiAudioCnt)++;
	}
	
	return len;
}
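
/* Worked example of the size math above (assuming a 320-byte G.711 frame,
   i.e. 40 ms at 8 kHz; not from the original code):
     320 bytes A-law   * 2  ->  640 bytes of 16-bit PCM, 8 kHz mono
                       * 6  -> 3840 bytes at 48 kHz mono (sample repetition)
                       * 2  -> 7680 bytes at 48 kHz stereo
   which is then split into 6 parts of 7680 / 6 = 1280 bytes (about 6.7 ms each).
   The converted data must fit pc48KTwoChnPcmBigEndianBuf (4096 * 3 shorts =
   24576 bytes), so the input G.711 frame can be at most 24576 / 24 = 1024 bytes. */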



int init_e2t_param(es2tsParam * e2tPtr )
{
	e2tPtr->videoLastTime = 0;
	e2tPtr->audioLastTime = 0;
	e2tPtr->videoAddTime = 0;
	e2tPtr->audioAddTime = 0;
	e2tPtr->videoFPS = 0;
	e2tPtr->fpsFlag = 0;
	e2tPtr->audioFlag = 0;
	e2tPtr->audioTmp = 0;
	e2tPtr->nFindPFream = 0;
	e2tPtr->iNeedAudioFream = 0;
	e2tPtr->iNeedVideoFream = 1;
	e2tPtr->uiVideoCnt = 0;
	e2tPtr->uiAudioCnt = 0;
	e2tPtr->uiWritVideoCnt = 0;
	e2tPtr->iCachVideoCnt = 0;
	e2tPtr->iWritAudioCnt = 0;
	e2tPtr->iExistAudioFream = 0;	
	e2tPtr->uiCurWtsTime = 0;
	e2tPtr->need_i_frame = 1;
	e2tPtr->g_iWriteTsPCMFileFd = -1;
	return 0;
}

int m_es2ts(es2tsParam * e2tPtr ,unsigned char *data,unsigned int dataLen,unsigned int frameType, unsigned int timestamp,unsigned char *outPut,unsigned int outPutLen)
{
	int videoTmpFPS = 0;
	double tmpfps;
	int i;
	int len = 0;
	int ret;
	int maxLen = outPutLen;
	if(outPutLen <= 0)
	{
		printf("outPutLen is error\n");
		return -1;
	}
	#if DBG_MSG_ON
	if(e2tPtr->g_iWriteTsPCMFileFd < 0)
	{
		e2tPtr->g_iWriteTsPCMFileFd = open("WriteTsPcm.pcm", O_CREAT | O_WRONLY | O_APPEND, 0666);
		if(e2tPtr->g_iWriteTsPCMFileFd < 0)
		{
			perror("open outaudio file failed:");
		}
	}
	#endif
	if(e2tPtr->need_i_frame == 1 && STREAM_FRAME_H264_IDR == frameType)
	{
		e2tPtr->need_i_frame = 0;
	}
	if (e2tPtr->videoAddTime == 0 && (STREAM_FRAME_H264_IDR== frameType || STREAM_FRAME_H264_P == frameType))//derive the video timestamp increment from the incoming timestamps
	{
		videoTmpFPS = timestamp-e2tPtr->videoLastTime;
		if(videoTmpFPS > 0 && videoTmpFPS < 90000)
		{
			printf("videoTmpFPS:%d,now:%u,last:%u\n",videoTmpFPS,timestamp,e2tPtr->videoLastTime);
			tmpfps = 90000.0/videoTmpFPS;
			/*if(videoTmpFPS <= 25)
			{
				videoTmpFPS = 25;
			}
			else if(videoTmpFPS <= 30)
			{
				videoTmpFPS = 30;
			}
			else if(videoTmpFPS <= 50)
			{
				videoTmpFPS = 50;
			}
			else
			{
				videoTmpFPS = 60;
			}*/
			printf("tmpfps:%f,videoFPS:%f\n",tmpfps,e2tPtr->videoFPS);
			if(tmpfps != e2tPtr->videoFPS)
			{
				e2tPtr->fpsFlag++;
				if(e2tPtr->fpsFlag > 5)
				{
					e2tPtr->videoFPS = tmpfps;
					e2tPtr->videoAddTime = 90000/e2tPtr->videoFPS;
					e2tPtr->fpsFlag = 0;
				}
			}
			else
			{
				e2tPtr->fpsFlag = 0;
			}
		}
		printf("videoAddTime:%d,now:%u,last:%u\n",e2tPtr->videoAddTime,timestamp,e2tPtr->videoLastTime);
		e2tPtr->videoLastTime = timestamp;
		if( e2tPtr->videoAddTime == 0)
		{
			return 0;
		}
	}
	else if (e2tPtr->audioAddTime == 0 && STREAM_FRAME_G711 == frameType)
	{
		e2tPtr->audioTmp = (timestamp-e2tPtr->audioLastTime)*15/8;
		if(e2tPtr->audioAddTime != e2tPtr->audioTmp)
		{
			e2tPtr->audioFlag++;
			if(e2tPtr->audioFlag > 5)
			{
				e2tPtr->audioAddTime =e2tPtr->audioTmp;
				e2tPtr->audioFlag = 0;
			}
		}
		else
		{
			e2tPtr->audioFlag = 0;
		}
		printf("audioAddTime:%u,%u,%u,len:%d\n",e2tPtr->audioAddTime,timestamp,e2tPtr->audioLastTime,dataLen);
		e2tPtr->audioLastTime = timestamp;
		if(e2tPtr->audioAddTime == 0)
		{
			return 0;
		}
	}
	if(e2tPtr->need_i_frame == 1)
	{
		return 0;
	}
	if(e2tPtr->audioAddTime != 0)//TS does not carry G.711 directly, so extra handling is needed when audio is present
	{

                //first, cache the incoming audio/video frame
                if (STREAM_FRAME_H264_IDR== frameType || STREAM_FRAME_H264_P == frameType)
                {

                    if (e2tPtr->iCachVideoCnt < CACHEVIDEOFREMCNT)//cache not full yet: just cache the frame
                    {

                        e2tPtr->VideoFreamCach[e2tPtr->iCachVideoCnt].m_iDatalen = dataLen;
                        e2tPtr->VideoFreamCach[e2tPtr->iCachVideoCnt].m_frametype = frameType;
                        e2tPtr->VideoFreamCach[e2tPtr->iCachVideoCnt].m_uTimeTamp = e2tPtr->uiVideoCnt * e2tPtr->videoAddTime;
                        memcpy(e2tPtr->VideoFreamCach[e2tPtr->iCachVideoCnt].m_cVideoBuf, data, dataLen); 
                        e2tPtr->uiVideoCnt++;
                        e2tPtr->iCachVideoCnt++;
                    }
                    else if ((CACHEVIDEOFREMCNT == e2tPtr->iCachVideoCnt) && (STREAM_FRAME_H264_P == frameType))//cache full and a P frame arrives: flush the cached frames and drop this one
                    {
                        //printf("Delete One P Video Fream.\n");
			  for(i = 0;i < e2tPtr->iCachVideoCnt-1;i++)
			  {
			  	    ret = store_ts_from_es(outPut+len,maxLen-len, e2tPtr->VideoFreamCach[i].m_cVideoBuf,e2tPtr->VideoFreamCach[i].m_iDatalen, e2tPtr->VideoFreamCach[i].m_frametype, e2tPtr->VideoFreamCach[i].m_uTimeTamp);
				    if(ret >0)
				    {
				    	len += ret;
				    }
				    //tsNetBitstreamWriter.newBlock(handle, VideoFreamCach[i].m_cVideoBuf, VideoFreamCach[i].m_iDatalen, VideoFreamCach[i].m_frametype, VideoFreamCach[i].m_uTimeTamp);
                                e2tPtr->uiCurWtsTime = e2tPtr->VideoFreamCach[i].m_uTimeTamp;
				   e2tPtr->uiWritVideoCnt++;
			  }
			  e2tPtr->iCachVideoCnt= 0;
                    }
                    else if ((CACHEVIDEOFREMCNT == e2tPtr->iCachVideoCnt) && (STREAM_FRAME_H264_IDR== frameType))//cache full and an IDR frame arrives: search backwards for a cached P frame to drop
                    {
                        for (i = e2tPtr->iCachVideoCnt - 1; i >= 0; i--)
                        {
                            if(e2tPtr->VideoFreamCach[i].m_frametype == STREAM_FRAME_H264_P)
                            {
                                e2tPtr->nFindPFream = 1;
                                break;
                            }
                        }
                
                        if(1 == e2tPtr->nFindPFream)//a cached P frame was found
                        {
                            for (; i < CACHEVIDEOFREMCNT - 1; i++)
                            {
                                memcpy(&(e2tPtr->VideoFreamCach[i]), &(e2tPtr->VideoFreamCach[i + 1]), sizeof(TVideoFream));
                            }
                            e2tPtr->nFindPFream = 0;
                        }
                
                        memset(&(e2tPtr->VideoFreamCach[CACHEVIDEOFREMCNT - 1]), 0, sizeof(TVideoFream));
                        e2tPtr->VideoFreamCach[CACHEVIDEOFREMCNT - 1].m_iDatalen = dataLen;
                        e2tPtr->VideoFreamCach[CACHEVIDEOFREMCNT - 1].m_frametype = frameType;
                        e2tPtr->VideoFreamCach[CACHEVIDEOFREMCNT - 1].m_uTimeTamp = e2tPtr->uiVideoCnt * e2tPtr->videoAddTime;
                        memcpy(e2tPtr->VideoFreamCach[CACHEVIDEOFREMCNT - 1].m_cVideoBuf, data, dataLen);  
                    }
                    //printf("AAA iCachVideoCnt = %d, uiVideoCnt = %d, m_uTimeTamp = %u\n", iCachVideoCnt, uiVideoCnt, VideoFreamCach[iCachVideoCnt - 1].m_uTimeTamp);
                }
                else if (STREAM_FRAME_G711 == frameType)
                {
			
                    if (1 == e2tPtr->iExistAudioFream) //the previous audio frame has not been fully written and new audio arrives
                    {
                        if (0 == e2tPtr->iCachVideoCnt)//video is needed but audio arrived and no video is cached
                        {
                            e2tPtr->iExistAudioFream = 1;

                            dataLen = Dealwith8KSigChnG711Audio(data, dataLen, e2tPtr->pc48KTwoChnPcmBigEndianBuf, e2tPtr->Audio48KTwoChnPcmPart, &(e2tPtr->uiAudioCnt), 1,e2tPtr->audioAddTime);

				return 0;
                        }
                        while (6 != e2tPtr->iWritAudioCnt)
                        {
                        	if((e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp < e2tPtr->uiCurWtsTime) )
				 {
		 	        #if DBG_MSG_ON
                                unsigned int ret = write(e2tPtr->g_iWriteTsPCMFileFd, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen); 
                                if (ret != e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen)
                                {
                                    printf("g711 Write TsAudioFile is Error: ret = %d, len = %d\n", ret, dataLen);
                                }
                                #endif
				    ret = store_ts_from_es(outPut+len,maxLen-len, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_frametype, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp);
				    if(ret >0)
				    {
				    	len += ret;
				    }
				    //tsNetBitstreamWriter.newBlock(handle, Audio48KTwoChnPcmPart[iWritAudioCnt].m_c48KTwoChnPcmBufPart, 
                                //        Audio48KTwoChnPcmPart[iWritAudioCnt].m_iDatalen, Audio48KTwoChnPcmPart[iWritAudioCnt].m_frametype, Audio48KTwoChnPcmPart[iWritAudioCnt].m_uiPartTimeTamp);          
                                e2tPtr->uiCurWtsTime = e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp;
                                e2tPtr->iWritAudioCnt++;
                                e2tPtr->iNeedAudioFream = 0;
                                e2tPtr->iNeedVideoFream = 1;
				   continue;
				 }
      				//printf("------runing0:%d--%u-%u--%u--\n",iWritAudioCnt,uiCurWtsTime,Audio48KTwoChnPcmPart[iWritAudioCnt].m_uiPartTimeTamp,uiWritVideoCnt * videoAddTime);
                            //while the audio timestamp is at or after the current TS write time and not later than the next video timestamp
                            while (e2tPtr->iWritAudioCnt < 6 &&(e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp >= e2tPtr->uiCurWtsTime) 
                                && (e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp <= e2tPtr->uiWritVideoCnt * e2tPtr->videoAddTime))
                            {
                                #if DBG_MSG_ON
                                unsigned int ret = write(e2tPtr->g_iWriteTsPCMFileFd, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen); 
                                if (ret != e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen)
                                {
                                    printf("g711 Write TsAudioFile is Error: ret = %d, len = %d\n", ret, dataLen);
                                }
                                #endif
                                ret = store_ts_from_es(outPut+len,maxLen-len, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_frametype, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp);
				    if(ret >0)
				    {
				    	len += ret;
				    }
				    //tsNetBitstreamWriter.newBlock(handle, Audio48KTwoChnPcmPart[iWritAudioCnt].m_c48KTwoChnPcmBufPart, 
                                //        Audio48KTwoChnPcmPart[iWritAudioCnt].m_iDatalen, Audio48KTwoChnPcmPart[iWritAudioCnt].m_frametype, Audio48KTwoChnPcmPart[iWritAudioCnt].m_uiPartTimeTamp);          
                                e2tPtr->uiCurWtsTime = e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp;
                                e2tPtr->iWritAudioCnt++;
                                e2tPtr->iNeedAudioFream = 0;
                                e2tPtr->iNeedVideoFream = 1;
                                //printf("AA uiCurWtsTime = %u, iWritAudioCnt = %d, uiAudioCnt = %d\n", uiCurWtsTime, iWritAudioCnt, uiAudioCnt);
                            }
    
                            if (6 == e2tPtr->iWritAudioCnt)
                            {
                                break;
                            }
                            else
                            {

                            	  if (e2tPtr->iCachVideoCnt > 0)
                            	  {
                                    if (e2tPtr->VideoFreamCach[0].m_uTimeTamp <= (e2tPtr->uiCurWtsTime + e2tPtr->audioAddTime))
                                    {
                                       ret = store_ts_from_es(outPut+len,maxLen-len,  e2tPtr->VideoFreamCach[0].m_cVideoBuf, e2tPtr->VideoFreamCach[0].m_iDatalen, e2tPtr->VideoFreamCach[0].m_frametype, e2tPtr->VideoFreamCach[0].m_uTimeTamp);
					    if(ret >0)
					    {
					    	len += ret;
					    }
					     //tsNetBitstreamWriter.newBlock(handle, VideoFreamCach[0].m_cVideoBuf, VideoFreamCach[0].m_iDatalen, VideoFreamCach[0].m_frametype, VideoFreamCach[0].m_uTimeTamp);
                                        e2tPtr->uiCurWtsTime = e2tPtr->VideoFreamCach[0].m_uTimeTamp;
                                        memset(&(e2tPtr->VideoFreamCach[0]), 0, sizeof(TVideoFream));
                                        for (i = 0; i <e2tPtr->iCachVideoCnt-1; i++)
                                        {
                                            memcpy(&(e2tPtr->VideoFreamCach[i]), &(e2tPtr->VideoFreamCach[i + 1]), sizeof(TVideoFream));
                                        }
                                        e2tPtr->uiWritVideoCnt++;
                                        e2tPtr->iCachVideoCnt--;
                                        //printf("BB Video has writed uiCurWtsTime = %u, uiVideoCnt = %d, iCachVideoCnt = %d, uiWritVideoCnt = %d\n", uiCurWtsTime, uiVideoCnt, iCachVideoCnt, uiWritVideoCnt);
                                    }
					  e2tPtr->iNeedAudioFream = 1;
					  e2tPtr->iNeedVideoFream = 0;
                            	  }
				  else
				  {
				  	break;
				  }

                            }
                        }
				 if (6 == e2tPtr->iWritAudioCnt)
				 {
                        e2tPtr->iWritAudioCnt = 0;
                        e2tPtr->iExistAudioFream = 0;
				 }
                    }
                
                    if (0 == e2tPtr->iExistAudioFream)
                    {
                        e2tPtr->iExistAudioFream = 1;

                        dataLen = Dealwith8KSigChnG711Audio(data, dataLen, e2tPtr->pc48KTwoChnPcmBigEndianBuf, e2tPtr->Audio48KTwoChnPcmPart, &(e2tPtr->uiAudioCnt), 0,e2tPtr->audioAddTime); 

			}
                }
                //printf("##00 iNeedVideoFream = %d, iNeedAudioFream = %d, uiCurWtsTime = %u\n", iNeedVideoFream, iNeedAudioFream, uiCurWtsTime);

                //start writing the TS stream
                if (e2tPtr->iCachVideoCnt > 0 && 1 == e2tPtr->iNeedVideoFream)
                {
                    if (e2tPtr->VideoFreamCach[0].m_uTimeTamp <= (e2tPtr->uiCurWtsTime + e2tPtr->audioAddTime))
                    {
                    	   ret = store_ts_from_es(outPut+len,maxLen-len,  e2tPtr->VideoFreamCach[0].m_cVideoBuf, e2tPtr->VideoFreamCach[0].m_iDatalen, e2tPtr->VideoFreamCach[0].m_frametype, e2tPtr->VideoFreamCach[0].m_uTimeTamp);
	   		    if(ret >0)
	   		    {
	   		    	len += ret;
	   		    }
			   //tsNetBitstreamWriter.newBlock(handle, VideoFreamCach[0].m_cVideoBuf, VideoFreamCach[0].m_iDatalen, VideoFreamCach[0].m_frametype, VideoFreamCach[0].m_uTimeTamp);
                        e2tPtr->uiCurWtsTime = e2tPtr->VideoFreamCach[0].m_uTimeTamp;
                        memset(&(e2tPtr->VideoFreamCach[0]), 0, sizeof(TVideoFream));
                        for (i = 0; i < e2tPtr->iCachVideoCnt-1; i++)
                        {
                            memcpy(&(e2tPtr->VideoFreamCach[i]), &(e2tPtr->VideoFreamCach[i + 1]), sizeof(TVideoFream));
                        }
                        e2tPtr->uiWritVideoCnt++;
                        e2tPtr->iCachVideoCnt--;
                        //printf("CC Video has writed uiCurWtsTime = %u, uiVideoCnt = %d, iCachVideoCnt = %d, uiWritVideoCnt = %d\n", uiCurWtsTime, uiVideoCnt, iCachVideoCnt, uiWritVideoCnt);
                    }
                    e2tPtr->iNeedAudioFream = 1;
                    e2tPtr->iNeedVideoFream = 0;
                }
                //printf("##11 iNeedVideoFream = %d, iNeedAudioFream = %d, uiCurWtsTime = %u\n", iNeedVideoFream, iNeedAudioFream, uiCurWtsTime);

                if (e2tPtr->uiAudioCnt > 0 && 1 == e2tPtr->iNeedAudioFream && 1 == e2tPtr->iExistAudioFream)
                {
                    while (e2tPtr->iWritAudioCnt < 6 &&(e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp >= e2tPtr->uiCurWtsTime) 
                            && (e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp <= e2tPtr->uiWritVideoCnt * e2tPtr->videoAddTime))
                    {
            	 	    #if DBG_MSG_ON
                        unsigned int ret = write(e2tPtr->g_iWriteTsPCMFileFd, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen); 
                        if (ret != e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen)
                        {
                            printf("g711 Write TsAudioFile is Error: ret = %d, len = %d\n", ret, dataLen);
                        }
                        #endif
                	  ret = store_ts_from_es(outPut+len,maxLen-len,   e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart,  e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_frametype, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp);
	   		    if(ret >0)
	   		    {
	   		    	len += ret;
	   		    }
			   //tsNetBitstreamWriter.newBlock(handle, Audio48KTwoChnPcmPart[iWritAudioCnt].m_c48KTwoChnPcmBufPart, 
                         //       Audio48KTwoChnPcmPart[iWritAudioCnt].m_iDatalen, Audio48KTwoChnPcmPart[iWritAudioCnt].m_frametype, Audio48KTwoChnPcmPart[iWritAudioCnt].m_uiPartTimeTamp);          
                        e2tPtr->uiCurWtsTime = e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp;
                        e2tPtr->iWritAudioCnt++;
                        e2tPtr->iNeedAudioFream = 0;
                        e2tPtr->iNeedVideoFream = 1;
                        //printf("DD uiCurWtsTime = %u, iWritAudioCnt = %d, uiAudioCnt = %d\n", uiCurWtsTime, iWritAudioCnt, uiAudioCnt);
                    }
                    //printf("##22 iNeedVideoFream = %d, iNeedAudioFream = %d, uiCurWtsTime = %u\n", iNeedVideoFream, iNeedAudioFream, uiCurWtsTime);
                
                    //if video frames are still cached, check whether one can be written
                    if (e2tPtr->iCachVideoCnt > 0 && 1 == e2tPtr->iNeedVideoFream)
                    {
                        if (e2tPtr->VideoFreamCach[0].m_uTimeTamp <= (e2tPtr->uiCurWtsTime + e2tPtr->audioAddTime))
                        {
				ret = store_ts_from_es(outPut+len,maxLen-len,  e2tPtr->VideoFreamCach[0].m_cVideoBuf, e2tPtr->VideoFreamCach[0].m_iDatalen, e2tPtr->VideoFreamCach[0].m_frametype, e2tPtr->VideoFreamCach[0].m_uTimeTamp);
    	   		      if(ret >0)
    	   		      {
    	   		       	len += ret;
    	   		      }
				//tsNetBitstreamWriter.newBlock(handle, VideoFreamCach[0].m_cVideoBuf, VideoFreamCach[0].m_iDatalen, VideoFreamCach[0].m_frametype, VideoFreamCach[0].m_uTimeTamp);
                            e2tPtr->uiCurWtsTime = e2tPtr->VideoFreamCach[0].m_uTimeTamp;
                            memset(&(e2tPtr->VideoFreamCach[0]), 0, sizeof(TVideoFream));
                            for (i = 0; i < e2tPtr->iCachVideoCnt-1; i++)
                            {
                                memcpy(&(e2tPtr->VideoFreamCach[i]), &(e2tPtr->VideoFreamCach[i + 1]), sizeof(TVideoFream));
                            }
                            e2tPtr->uiWritVideoCnt++;
                            e2tPtr->iCachVideoCnt--;
                            //printf("EE Video has writed uiCurWtsTime = %u, uiVideoCnt = %d, iCachVideoCnt = %d, uiWritVideoCnt = %d\n", uiCurWtsTime, uiVideoCnt, iCachVideoCnt, uiWritVideoCnt);
                        }
                        e2tPtr->iNeedAudioFream = 1;
                        e2tPtr->iNeedVideoFream = 0;
                        //printf("##33 iNeedVideoFream = %d, iNeedAudioFream = %d, uiCurWtsTime = %u\n", iNeedVideoFream, iNeedAudioFream, uiCurWtsTime);
           
                        while (e2tPtr->iWritAudioCnt < 6 && (e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp >= e2tPtr->uiCurWtsTime) && (e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp <= e2tPtr->uiWritVideoCnt * e2tPtr->videoAddTime))
                        {
                    	    #if DBG_MSG_ON
                            unsigned int ret = write(e2tPtr->g_iWriteTsPCMFileFd, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen); 
                            if (ret != e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen)
                            {
                                printf("g711 Write TsAudioFile is Error: ret = %d, len = %d\n", ret, dataLen);
                            }
                            #endif
                            ret = store_ts_from_es(outPut+len,maxLen-len,  e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_c48KTwoChnPcmBufPart,  e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_iDatalen, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_frametype, e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp);
    	   		      if(ret >0)
    	   		      {
    	   		       	len += ret;
    	   		      }
				//tsNetBitstreamWriter.newBlock(handle, Audio48KTwoChnPcmPart[iWritAudioCnt].m_c48KTwoChnPcmBufPart, 
                            //        Audio48KTwoChnPcmPart[iWritAudioCnt].m_iDatalen, Audio48KTwoChnPcmPart[iWritAudioCnt].m_frametype, Audio48KTwoChnPcmPart[iWritAudioCnt].m_uiPartTimeTamp);          
                            e2tPtr->uiCurWtsTime = e2tPtr->Audio48KTwoChnPcmPart[e2tPtr->iWritAudioCnt].m_uiPartTimeTamp;
                            e2tPtr->iWritAudioCnt++;
                            e2tPtr->iNeedAudioFream = 0;
                            e2tPtr->iNeedVideoFream = 1;
                            //printf("FF uiCurWtsTime = %u, iWritAudioCnt = %d, uiAudioCnt = %d\n", uiCurWtsTime, iWritAudioCnt, uiAudioCnt);
                        }

			//printf("##44 iNeedVideoFream = %d, iNeedAudioFream = %d, uiCurWtsTime = %u\n", iNeedVideoFream, iNeedAudioFream, uiCurWtsTime);
                    }
                
                    if (6 == e2tPtr->iWritAudioCnt)
                    {
                        e2tPtr->iWritAudioCnt = 0;
                        e2tPtr->iExistAudioFream = 0;
                    }
                }
	}
	else
	{	
		  ret = store_ts_from_es(outPut+len,maxLen-len,  data, dataLen, frameType, timestamp);
		if(ret >0)
		{
			len += ret;
		}
                //tsNetBitstreamWriter.newBlock(handle, (char *)data, dataLen, frameType, fileInfo.timestamp);
	}
	return len;
}

void InitSocket(int *UdpSocket, int iPort,char * ethName,char * MulticIp)
{
	int on = 1;
	//struct ip_mreq mreq;                                    /* join the multicast group */
	struct sockaddr_in servaddr;
    struct ifreq ifr;
	if ((*UdpSocket = socket(AF_INET, SOCK_DGRAM, 0)) < 0)
	{
		perror("create UdpSocket socket failed!");
		return ;
	}
        strcpy(ifr.ifr_name, ethName);
        //SIOCGIFADDR retrieves the interface address
        if (ioctl(*UdpSocket, SIOCGIFADDR, &ifr) <  0)
        {
                perror("ioctl");
		 close(*UdpSocket);
		*UdpSocket = -1;
		return ;
        }
        //printf("%s\n", inet_ntoa(((struct sockaddr_in*)&(ifr.ifr_addr))->sin_addr));	
	bzero(&servaddr, sizeof(servaddr));
	servaddr.sin_family = AF_INET;
	servaddr.sin_port = htons(iPort);
	servaddr.sin_addr = ((struct sockaddr_in*)&(ifr.ifr_addr))->sin_addr;

	if((setsockopt(*UdpSocket, SOL_SOCKET, SO_REUSEADDR, &on, sizeof(on))) < 0)  
	{
		perror("set UDP_sock sockopt failed");
		close(*UdpSocket);
		*UdpSocket = -1;
		return ;
	}
	if (bind(*UdpSocket, (struct sockaddr *)&servaddr, sizeof(servaddr)) == -1)
	{
		perror("bind UDP_sock sockopt failed"); 
		close(*UdpSocket);
		*UdpSocket = -1;
		return ;
	}
	servaddr.sin_addr.s_addr = inet_addr ((char *)MulticIp);
	servaddr.sin_port = htons ((unsigned short)iPort);
	if ( connect(*UdpSocket, (struct sockaddr *)&servaddr,sizeof(servaddr)) == -1 )
	{
		perror("connect UDP_sock sockopt failed"); 
		close(*UdpSocket);
		*UdpSocket = -1;
		return ;
	}
}

int send_ts_stream(int socket,unsigned char * sendPtr,int sendLen)
{
	int bufSize = sendLen;
	unsigned char * bufData = sendPtr;
	int udpPayLoad = 0;
	int maxPayload = 1450;
	struct timeval  lastDatatime = {0, 0};        /* Stores the time before last data packet sending */
	struct timeval  nextDatatime;        /* Stores the time before next data packet sending */
	unsigned int  dt_interval = 0;     /* Data packet interval               */
    while(bufSize > 0)
    {

        udpPayLoad   = maxPayload < bufSize ? maxPayload : bufSize;

	#if 1
            gettimeofday(&nextDatatime,NULL);
            dt_interval = ((nextDatatime.tv_usec+nextDatatime.tv_sec*1000000)-(lastDatatime.tv_usec+lastDatatime.tv_sec*1000000));
            if( dt_interval < 160)
            {
				int i;
				/* busy-wait, roughly equivalent to sleeping 250 microseconds */
				for(i=0;i<43414;i++)
				{
					asm volatile("");
				}
            }
            lastDatatime.tv_usec = nextDatatime.tv_usec;
            lastDatatime.tv_sec  = nextDatatime.tv_sec;
	#endif
	 if ((udpPayLoad/188) == 0)	/* leftover shorter than one TS packet: stop */
	 	break;
	 send(socket,bufData,((udpPayLoad/188) * 188),0);

        bufSize -= ((udpPayLoad/188) * 188);
        bufData += ((udpPayLoad/188) * 188);
    }
    return sendLen - bufSize;	/* bytes actually sent */
}


G711.h

/************************************************************************
Copyright (c) 2007 Stretch, Inc. All rights reserved.  Stretch products
are protected under numerous U.S. and foreign patents, maskwork rights,
copyrights and other intellectual property laws.

This source code and the related tools, software code and documentation,
and your use thereof, are subject to and governed by the terms and
conditions of the applicable Stretch IDE or SDK and RDK License Agreement
(either as agreed by you or found at www.stretchinc.com).  By using
these items, you indicate your acceptance of such terms and conditions
between you and Stretch, Inc.  In the event that you do not agree with
such terms and conditions, you may not use any of these items and must
immediately destroy any copies you have made.
************************************************************************/

/************************************************************************
    PACKAGE: g711 -- G711 Speech Codec

    DESCRIPTION:

    SECTION: Include
    {
    #include "g711.h"
    }

    SECTION: Introduction

    @g711@ is an implementation of the G711 speech coding standard
    optimized for the Stretch S6000 processors.  The codec supports
    both mu law and alaw encoding/decoding.

    SECTION: Usage model

    To use @g711@, a @g711@ encoder/decoder instance must be created first
    using g711_open().  After that, g711_encode() and g711_decode() can be
    called.  When finished, g711_close() must be called to free the memory
    associated with the instance.  Due to the simplicity of the g711 algorithm,
    both encoding and decoding can be done using a single instance.

    This package supports single or multi channel interleaved inputs.
    In the multi-channel case, the API allows the encoding/decoding to
    be performed on a specific channel, without requiring the caller to
    extract the channel first.

    SECTION: Data representation

    Linear data has 16 bits.  Only the most significant 12 bits
    are used for encoding.  Compressed data has 8 bits stored in
    the least significant 8 bits of a short.
************************************************************************/
#ifndef G711_HEADER
#define G711_HEADER

/*******************************************************************************
   A necessary evil introduced for C++ compatibility.  C source files must
   not declare a function "extern"; instead, they must declare the function
   "EXTERN_G711".  For example:
   {
       EXTERN_G711 void my_external_symbol(int a, double f);
   }
   This specifies that the function has C linkage so that it can be used
   when compiled with a C++ compiler.
*******************************************************************************/
#if defined(__cplusplus)
   #define EXTERN_G711              extern "C"
#else
   #define EXTERN_G711              extern
#endif

/************************************************************************
    Return status for g711 routines
************************************************************************/
typedef enum g711_status_enum {
    G711_OK
} g711_status_e;

/************************************************************************
    Type of companding to use for encoding or decoding

    Use G711_ULAW for USA
    Use G711_ALAW for Asia and Europe
************************************************************************/
typedef enum g711_au_enum {
    G711_ALAW,
    G711_ULAW,
} g711_au_e;

/************************************************************************
    Structure used to configure a g711 encoder or decoder instance.
    Both encoding and decoding can use the same instance provided
    that they have the same configuration parameters.

    region (law) = ulaw, Use G711_ALAW for Asia and Europe
    num_channels - number of audio channels for 1 for mono, 2 for stereo
    channel_select - The channel used. First one is 0. Use 0 for both
    Mono and Stereo since in the case of stereo we are just duplicating
    left channel twice.

************************************************************************/
typedef struct g711_config_struct {
    g711_au_e law;
    unsigned char num_channels;
    unsigned char channel_select;
} g711_config_t;

/************************************************************************
    A g711 encoder/decoder handle
************************************************************************/
typedef struct g711_struct g711_t;

/************************************************************************
    GROUP: g711 instance management
************************************************************************/
EXTERN_G711 g711_status_e g711_open(g711_config_t *config, g711_t **g711);
EXTERN_G711 g711_status_e g711_close(g711_t *g711);

/************************************************************************
    GROUP: Encoding and decoding routines
************************************************************************/
EXTERN_G711 g711_status_e g711_encode_mono(g711_t *g711, unsigned num, short *ibuf, unsigned char *obuf);
EXTERN_G711 g711_status_e g711_encode_stereo(g711_t *g711, unsigned num, short *ibuf, unsigned char *obuf);
EXTERN_G711 g711_status_e g711_decode(g711_t *g711, unsigned num, unsigned char *ibuf, short *obuf);
#endif
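
For reference, here is a minimal round-trip sketch of the API declared above (mono A-law, encode then decode). The names NUM_SAMPLES, pcm_in and g711_roundtrip_demo are illustrative and not part of the original project:

#include "g711.h"

#define NUM_SAMPLES 160   /* e.g. 20 ms of 8 kHz audio; illustrative value */

/* Encode NUM_SAMPLES 16-bit samples to A-law and decode them back. */
int g711_roundtrip_demo(short *pcm_in)
{
    g711_config_t cfg;
    g711_t *codec = NULL;
    unsigned char encoded[NUM_SAMPLES];
    short decoded[NUM_SAMPLES];

    cfg.law = G711_ALAW;      /* use G711_ULAW for mu-law instead */
    cfg.num_channels = 1;     /* mono input */
    cfg.channel_select = 0;   /* first (and only) channel */

    if (g711_open(&cfg, &codec) != G711_OK || codec == NULL)
        return -1;

    /* 16-bit PCM -> 8-bit A-law, then back to 16-bit PCM */
    g711_encode_mono(codec, NUM_SAMPLES, pcm_in, encoded);
    g711_decode(codec, NUM_SAMPLES, encoded, decoded);

    g711_close(codec);
    return 0;
}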


G711.cpp

/************************************************************************
Copyright (c) 2007 Stretch, Inc. All rights reserved.  Stretch products
are protected under numerous U.S. and foreign patents, maskwork rights,
copyrights and other intellectual property laws.

This source code and the related tools, software code and documentation,
and your use thereof, are subject to and governed by the terms and
conditions of the applicable Stretch IDE or SDK and RDK License Agreement
(either as agreed by you or found at www.stretchinc.com).  By using
these items, you indicate your acceptance of such terms and conditions
between you and Stretch, Inc.  In the event that you do not agree with
such terms and conditions, you may not use any of these items and must
immediately destroy any copies you have made.
************************************************************************/
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include "g711.h"

/*************************************************************************
    A g711 codec handle.  It can be used for encoding or decoding.
*************************************************************************/
struct g711_struct {
    g711_config_t config;
};

/*************************************************************************
    A-law encoding rule according to ITU-T Rec. G.711.
*************************************************************************/
static short g711_linear_to_alaw(short in)
{
    short out, ex;

    out = in < 0 ? (~in) >> 4 : in >> 4;

    if (out > 15) {
        ex = 1;
        while (out > 16 + 15) {
            out >>= 1;
            ex++;
        }
        out -= 16;
        out += ex << 4;
    }

    if (in >= 0) {
        out |= (0x0080);
    }

    return out ^ (0x0055);
}

/*************************************************************************
    A-law decoding rule according to ITU-T Rec. G.711.
*************************************************************************/
static short g711_alaw_to_linear(short in)
{
    short tmp, mant, ex;

    tmp = in ^ 0x0055;
    tmp &= (0x007F);
    ex = tmp >> 4;
    mant = tmp & (0x000F);
    if (ex > 0) {
        mant = mant + 16;
    }

    mant = (mant << 4) + (0x0008);
    if (ex > 1) {
      mant = mant << (ex - 1);
    }

    return in > 127 ? mant : -mant;
}

/*************************************************************************
    Mu-law encoding rule according to ITU-T Rec. G.711.
*************************************************************************/
static short g711_linear_to_ulaw(short in)
{
    short i, out, absno, segno, low_nibble, high_nibble;

    absno = in < 0 ? ((~in) >> 2) + 33 : ((in) >> 2) + 33;

    if (absno > (0x1FFF)) {
      absno = (0x1FFF);
    }

    i = absno >> 6;
    segno = 1;
    while (i != 0)
    {
        segno++;
        i >>= 1;
    }

    high_nibble = (0x0008) - segno;

    low_nibble = (absno >> segno) & (0x000F);
    low_nibble = (0x000F) - low_nibble;

    out = (high_nibble << 4) | low_nibble;

    /* Add sign bit */
    return in >= 0 ? (out | 0x0080) : out;
}

/*************************************************************************
    Mu-law decoding rule according to ITU-T Rec. G.711.
*************************************************************************/
static short g711_ulaw_to_linear(short in)
{
    short out, segment, mantissa, exponent, sign, step;

    sign = in < 0x0080 ? -1 : 1;

    mantissa = ~in;
    exponent = (mantissa >> 4) & (0x0007);
    segment = exponent + 1;
    mantissa = mantissa & (0x000F);

    step = (4) << segment;

    out = sign * (((0x0080) << exponent) + step * mantissa + step / 2 - 4 * 33);

    return out;
}

/*************************************************************************
    Initialize and return a g711 codec instance in "*g711".  The 
    instance can be used for encoding as well as decoding as long as
    the configuration parameters are the same.
*************************************************************************/
g711_status_e g711_open(g711_config_t *config, g711_t **g711)
{
    g711_t *hdl;
    hdl = (g711_t *) malloc(sizeof(g711_t));
    assert(hdl != NULL);    /* guard against allocation failure; the API has no error status to return */
    memcpy(&hdl->config, config, sizeof(g711_config_t));
    *g711 = hdl;

    return G711_OK;
}

/*************************************************************************
    Encode "num" input samples in "ibuf" and store the output in *obuf.
    "ibuf" must hold "num" of shorts and "obuf" "num" of bytes.
    In case of multi-channel data, the "ibuf" should contain "n * num"
    number of interleaved samples where "n" is the number of channels.
    "obuf" is assumed to be sufficient to hold the encoded data.
    If "g711" is configured to have multiple channels and encode
    only one channel, the encoding will only be performed on the
    specific channel and the output is stored in "obuf" contiguously.
*************************************************************************/
g711_status_e g711_encode_mono(g711_t *g711, unsigned num, short *ibuf, unsigned char *obuf)
{
    int out_buf_index, input_buf_index, incr, init, size;

    incr = g711->config.channel_select < g711->config.num_channels ? g711->config.num_channels : 1;
    init = g711->config.channel_select;
    size = num * g711->config.num_channels;

    switch (g711->config.law) {
    case G711_ALAW:
        for (out_buf_index = 0, input_buf_index = init; input_buf_index < size; input_buf_index += incr, out_buf_index++) {
            obuf[out_buf_index] = (unsigned char) g711_linear_to_alaw(ibuf[input_buf_index]);
        }
        break;
    case G711_ULAW:
        for (out_buf_index = 0, input_buf_index = init; input_buf_index < size; input_buf_index += incr, out_buf_index++) {
            obuf[out_buf_index] = (unsigned char) g711_linear_to_ulaw(ibuf[input_buf_index]);
        }
        break;
    default:
        assert(0);
    }

    return G711_OK;
}
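
/*************************************************************************
    Same as g711_encode_mono(), except that each encoded sample from the
    selected channel is written twice, so "obuf" receives "2 * num" bytes
    of duplicated (pseudo-stereo) output.
*************************************************************************/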
g711_status_e g711_encode_stereo(g711_t *g711, unsigned num, short *ibuf, unsigned char *obuf)
{
    int out_buf_index, input_buf_index, incr, init, size;

    incr = g711->config.channel_select < g711->config.num_channels ? g711->config.num_channels : 1;
    init = g711->config.channel_select;
    size = num * g711->config.num_channels;

    switch (g711->config.law) {
    case G711_ALAW:
        for (out_buf_index = 0, input_buf_index = init; input_buf_index < size; input_buf_index += incr, out_buf_index += 2) {
            obuf[out_buf_index] = (unsigned char) g711_linear_to_alaw(ibuf[input_buf_index]);
            obuf[out_buf_index+1] = obuf[out_buf_index];

        }
        break;
    case G711_ULAW:
        for (out_buf_index = 0, input_buf_index = init; input_buf_index < size; input_buf_index += incr, out_buf_index += 2) {
            obuf[out_buf_index] = (unsigned char) g711_linear_to_ulaw(ibuf[input_buf_index]);
            obuf[out_buf_index+1] = obuf[out_buf_index];

        }
        break;
    default:
        assert(0);
    }

    return G711_OK;
}

/****************************************************************************
   This function decodes the 8 bit encoded mono audio buffer into a 16 bit raw
   PCM audio buffer.

   Parameters:

       g711 -  pointer to g711 handle that was previously opened by the call
               to g711_open()

       num_samples - The number of audio samples in the 'src_buf' input buffer.
                     Note that this is not the size of 'src_buf'; in the case of
                     two-channel stereo it is half the size of 'src_buf'.

       src_buf -  (Input) A buffer containing the 8 bit encoded mono audio
               to be decoded.

       dest_buf - (Output) A pointer to a buffer to hold the decoded 16 bit raw
              audio. It is assumed that this buffer is at least twice as large as
              the input buffer 'src_buf' in order to hold the decoded raw audio.
******************************************************************************/
g711_status_e g711_decode(g711_t *g711, unsigned num_samples, unsigned char *src_buf, short *dest_buf)
{
    int dest_buf_index, src_buf_index, src_buf_step, init, size;

    src_buf_step = g711->config.channel_select < g711->config.num_channels ? g711->config.num_channels : 1;
    init = g711->config.channel_select;
    size = g711->config.channel_select < g711->config.num_channels ? num_samples : num_samples * g711->config.num_channels;

    if (g711->config.num_channels == 1)
    {
        // Note: For mono decoding, since we are going from an 8 bit buffer to a 16 bit buffer, we have to expand
        //       the output audio buffer. Since it is possible that the 'dest_buf' and 'src_buf' parameters
        //       overlap, we decode from the end of 'src_buf' and save into the end of 'dest_buf'.

        switch (g711->config.law) {
        case G711_ALAW:
            for (dest_buf_index = size - 1 ,src_buf_index = size - init - 1; src_buf_index >= 0; src_buf_index -= src_buf_step, dest_buf_index--) {
                dest_buf[dest_buf_index] = g711_alaw_to_linear(src_buf[src_buf_index]);
            }
            break;
        case G711_ULAW:
            for (dest_buf_index = size - 1 ,src_buf_index = size - init - 1; src_buf_index >= 0; src_buf_index -= src_buf_step, dest_buf_index--) {
                dest_buf[dest_buf_index] = g711_ulaw_to_linear(src_buf[src_buf_index]);
            }
            break;
        default:
            assert(0);
        }
    } 
    else
    {
        switch (g711->config.law) {
        case G711_ALAW:
            for (dest_buf_index = 0, src_buf_index = init; dest_buf_index < size; src_buf_index += src_buf_step, dest_buf_index++) {
                dest_buf[dest_buf_index] = g711_alaw_to_linear(src_buf[src_buf_index]);
            }
            break;
        case G711_ULAW:
            for (dest_buf_index = 0, src_buf_index = init; dest_buf_index < size; src_buf_index += src_buf_step, dest_buf_index++) {
                dest_buf[dest_buf_index] = g711_ulaw_to_linear(src_buf[src_buf_index]);
            }
            break;
        default:
            assert(0);
        }
    }

    return G711_OK;
}


/*************************************************************************
    Free the g711 codec instance
*************************************************************************/
g711_status_e g711_close(g711_t *g711)
{
    free(g711);
    return G711_OK;
}
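
The channel-select and in-place behaviour described in the comments above can be exercised as follows. This is a sketch under stated assumptions: the helper names (encode_left_channel_ulaw, decode_alaw_in_place) and buffer layouts are illustrative, not part of the original code.

#include "g711.h"

/* Encode only the left channel of interleaved 16-bit stereo into a contiguous
   mu-law byte stream, without de-interleaving first.  "stereo_in" holds
   num_frames * 2 shorts (L0 R0 L1 R1 ...), "ulaw_out" holds num_frames bytes. */
int encode_left_channel_ulaw(short *stereo_in, unsigned num_frames, unsigned char *ulaw_out)
{
    g711_config_t cfg;
    g711_t *codec = NULL;

    cfg.law = G711_ULAW;
    cfg.num_channels = 2;    /* interleaved stereo input */
    cfg.channel_select = 0;  /* 0 = left, 1 = right */

    if (g711_open(&cfg, &codec) != G711_OK || codec == NULL)
        return -1;

    /* "num" is the per-channel sample count; the encoder steps through the
       interleaved input by num_channels and writes the output contiguously. */
    g711_encode_mono(codec, num_frames, stereo_in, ulaw_out);
    g711_close(codec);
    return 0;
}

/* In-place mono decode, relying on the back-to-front loop in g711_decode():
   the first num_samples bytes of "buf" hold A-law data and are expanded into
   num_samples 16-bit samples in the same buffer.  "buf" must be at least
   num_samples * 2 bytes long and 16-bit aligned. */
int decode_alaw_in_place(unsigned char *buf, unsigned num_samples)
{
    g711_config_t cfg;
    g711_t *codec = NULL;

    cfg.law = G711_ALAW;
    cfg.num_channels = 1;
    cfg.channel_select = 0;

    if (g711_open(&cfg, &codec) != G711_OK || codec == NULL)
        return -1;

    g711_decode(codec, num_samples, buf, (short *)buf);
    g711_close(codec);
    return 0;
}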

The es2ts interfaces are then used as follows:

First define an es2tsParam e2t; the SDK data callback below initializes the multicast socket on the first frame, packs each incoming ES frame with m_es2ts(), and sends the resulting TS packets with send_ts_stream():

static void rtpDataCallBack(DevSDK_SessionHandle hSession,DevSDK_StreamHandle hStream,Session_Event enmEvent
                            ,unsigned char *data,uint32 dataLen,uint32 frameType,int64 timestamp,uint32 frameSeqId,void *cbParam)
{
    if(enmEvent == EVENT_STREAM_OK)
    {
        bIsClosed = false;
        if (frameType == STREAM_FRAME_G711 || frameType == STREAM_FRAME_AAC)
            return;                        // skip audio (G.711 / AAC) frames; they are not forwarded here
        progress = DevSDK_GetProgress((DevSDK_StreamHandle)hStream, (char *)data, dataLen,timestamp, frameType);
        LOGGER->Log(LOGGER_DEBUG, "progress: %d", progress);
//        if(forwardSocket == -1)
        if (MulticSocket == -1)
        {
            init_e2t_param(&e2t);
//            QString qEth = get_network_eth_name();
//            QString qEth = "eth1";
//            LOGGER->Log(LOGGER_DEBUG, "get network eth name: %s", qEth.toLatin1().data());
            InitSocket(&MulticSocket, 12345, "eth0", "225.1.2.3");
              //InitSocket(&MulticSocket, 12345, "eth1", "225.1.2.3");

//            forwardSocket = socket(AF_INET, SOCK_DGRAM, 0);
//            sAddr.sin_family = AF_INET;
//            sAddr.sin_port = htons(1234);
//            sAddr.sin_addr.s_addr = inet_addr("127.0.0.1");
//            nAddrLen = sizeof(sAddr);
        }
        else
        {
            int outPutLen = 0;
            memset(outPut, 0, sizeof(outPut));
            stamp_last += 3600;   // fixed per-frame timestamp increment (40 ms if the clock is 90 kHz)
            outPutLen = m_es2ts(&e2t, (unsigned char *)data, dataLen, frameType, stamp_last, outPut, 1024*1024);

            if (outPutLen > 0)
            {
                try {
                    send_ts_stream(MulticSocket, outPut, outPutLen);
                }
                catch (...) {
                    LOGGER->Log(LOGGER_ERROR, "send_ts_stream error!!!");
                }
//                sendto(forwardSocket, outPut, outPutLen, 0, (struct sockaddr *)&sAddr, nAddrLen);
            }
        }
    }
    else if (enmEvent == EVENT_STREAM_CLOSED)
    {
        LOGGER->Log(LOGGER_DEBUG, "playback stream close success.");
        bIsClosed = true;
    }
}
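
The callback above references several globals that are declared elsewhere in the program. A plausible minimal set of declarations, with types and sizes inferred from how they are used here (treat them as assumptions):

static es2tsParam    e2t;                     // ES->TS packing state, filled by init_e2t_param()
static int           MulticSocket = -1;       // multicast UDP socket, created on the first frame
static unsigned char outPut[1024 * 1024];     // TS output buffer handed to m_es2ts()
static unsigned int  stamp_last   = 0;        // timestamp fed to m_es2ts(), advanced by 3600 per frame
static bool          bIsClosed    = false;    // set when EVENT_STREAM_CLOSED arrives
static int           progress     = 0;        // value returned by DevSDK_GetProgress() (exact type assumed)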
