实验要求:
将输出文件保存为可供YUVViewer观看的YUV文件
以TXT文件输出
输出DC图像并经过Huffman统计其概率分布
实验原理:
JPEG编解码
JPEG码流分析
实验过程:
1.JPEG解码器-将JPG文件解码并转换为YUV文件
/*
 * Dump the decoded picture to disk: one raw file per plane
 * (<filename>.Y / .U / .V) plus a single planar YUV420 file
 * (<filename>.yuv) that players such as YUVViewer can open.
 * components[0] is the Y plane (width*height bytes); components[1]
 * and components[2] are the U and V planes (width*height/4 bytes each).
 */
static void write_yuv(const char *filename,
int width,
int height,
unsigned char **components)
{
FILE *F;
char temp[1024];
snprintf(temp, sizeof(temp), "%s.Y", filename);
F = fopen(temp, "wb");
if (F == NULL)
return;
fwrite(components[0], width, height, F);
fclose(F);
snprintf(temp, sizeof(temp), "%s.U", filename);
F = fopen(temp, "wb");
if (F == NULL)
return;
fwrite(components[1], width*height/4, 1, F);
fclose(F);
snprintf(temp, sizeof(temp), "%s.V", filename);
F = fopen(temp, "wb");
if (F == NULL)
return;
fwrite(components[2], width*height/4, 1, F);
fclose(F);
/* Concatenate Y, U, V into one playable YUV420P file. */
snprintf(temp, sizeof(temp), "%s.yuv", filename);
F = fopen(temp, "wb");
if (F == NULL)
return;
fwrite(components[0], width*height, 1, F);
fwrite(components[1], width*height/4, 1, F);
fwrite(components[2], width*height/4, 1, F);
fclose(F); /* was missing: data could stay unflushed */
}
2.huffman 结构体
/* A 2^9-entry lookup table resolves any Huffman code of 9 bits or
 * fewer in one step; longer codes fall back to the slow tables. */
#define HUFFMAN_BITS_SIZE 256
#define HUFFMAN_HASH_NBITS 9
#define HUFFMAN_HASH_SIZE (1UL<<HUFFMAN_HASH_NBITS)
#define HUFFMAN_HASH_MASK (HUFFMAN_HASH_SIZE-1)
struct huffman_table
{
/* Fast look up table: using HUFFMAN_HASH_NBITS bits we can obtain the
 * symbol directly; if the symbol is < 0 we need to look into the tree
 * table instead. Stores symbols whose code length does not exceed 9. */
short int lookup[HUFFMAN_HASH_SIZE];
/* code size: number of bits with which each symbol is encoded */
unsigned char code_size[HUFFMAN_HASH_SIZE];
/* Storage for values not encoded in the lookup table (code lengths
 * 10..16) - FIXME: calculate if 256 values is enough to store all. */
uint16_t slowtable[16-HUFFMAN_HASH_NBITS][256];
};
3.component结构体
/* Per-component (Y, U or V) decoding state. */
struct component
{
unsigned int Hfactor; /* horizontal sampling factor */
unsigned int Vfactor; /* vertical sampling factor */
float *Q_table; /* pointer to the quantisation table to use (points into jdec_private.Q_tables) */
struct huffman_table *AC_table; /* AC Huffman table for this component */
struct huffman_table *DC_table; /* DC Huffman table for this component */
short int previous_DC; /* previous DC coefficient (DC is coded as a delta) */
short int DCT[64]; /* DCT coefficients: temporary storage after block decode */
#if SANITY_CHECK
unsigned int cid;
#endif
};
3.jdec_private结构体
struct jdec_private
{
/* Public variables */
uint8_t components[COMPONENTS];//YUVbuffer
unsigned int width, height; / Size of the image */
unsigned int flags;
/* Private variables */
const unsigned char *stream_begin, *stream_end;//指向码流
unsigned int stream_length;
const unsigned char stream; / Pointer to the current stream */
unsigned int reservoir, nbits_in_reservoir;
struct component component_infos[COMPONENTS];//3个,分别为YUV
//量化表
float Q_tables[COMPONENTS][64]; /* quantization tables /
//huffman码表(至少4个)
struct huffman_table HTDC[HUFFMAN_TABLES]; / DC huffman tables ///2
struct huffman_table HTAC[HUFFMAN_TABLES]; / AC huffman tables ///2
int default_huffman_table_initialized;
int restart_interval;
int restarts_to_go; / MCUs left in this restart interval /
int last_rst_marker_seen; / Rst marker is incremented each time */
/* Temp space used after the IDCT to store each components /
uint8_t Y[644], Cr[64], Cb[64];
jmp_buf jump_state;
/* Internal Pointer use for colorspace conversion, do not modify it !!! */
uint8_t *plane[COMPONENTS];//三个指针,分别指向Ybuffer的开头、Ubuffer的开头、Vbuffer的开头
};
4.main函数
/*
 * Entry point: parse the command line
 *   [--benchmark] <input.jpg> <yuv420p|rgb24|bgr24|grey> <output> <dcfile> <acfile>
 * and run either the benchmark loop or a single conversion,
 * reporting the elapsed clock ticks.
 */
int main(int argc, char *argv[])
{
int output_format = TINYJPEG_FMT_YUV420P;
char *output_filename, *input_filename, *dc_filename, *ac_filename;
clock_t start_time, finish_time;
unsigned int duration;
int current_argument;
int benchmark_mode = 0;
#if TRACE
p_trace = fopen(TRACEFILE, "w");
if (p_trace == NULL)
{
printf("trace file open error!");
}
#endif
if (argc < 3)
usage();
/* Consume leading option flags. */
current_argument = 1;
while (1)
{
if (strcmp(argv[current_argument], "--benchmark") == 0)
benchmark_mode = 1;
else
break;
current_argument++;
}
/* Need input, format, output, dc and ac filenames: 5 positionals
 * (the original only checked for 2 and then read argv[+3]/[+4]). */
if (argc < current_argument+5)
usage();
input_filename = argv[current_argument];
if (strcmp(argv[current_argument+1], "yuv420p") == 0)
output_format = TINYJPEG_FMT_YUV420P;
else if (strcmp(argv[current_argument+1], "rgb24") == 0)
output_format = TINYJPEG_FMT_RGB24;
else if (strcmp(argv[current_argument+1], "bgr24") == 0)
output_format = TINYJPEG_FMT_BGR24;
else if (strcmp(argv[current_argument+1], "grey") == 0)
output_format = TINYJPEG_FMT_GREY;
else
exitmessage("Bad format: need to be one of yuv420p, rgb24, bgr24, grey\n");
output_filename = argv[current_argument+2];
dc_filename = argv[current_argument+3];
ac_filename = argv[current_argument+4];
start_time = clock();
if (benchmark_mode)
load_multiple_times(input_filename, output_filename, output_format);
else
convert_one_image(input_filename, output_filename, output_format, dc_filename, ac_filename);
finish_time = clock();
duration = finish_time - start_time;
snprintf(error_string, sizeof(error_string), "Decoding finished in %u ticks\n", duration);
#if TRACE
fclose(p_trace);
#endif
return 0;
}
main函数中的核心函数:convert_one_image函数
/*
 * Load the JPEG at infilename into memory, decode it with tinyjpeg
 * (which also dumps the per-MCU DC/AC coefficients into dcfilename
 * and acfilename), then save the decoded planes in the requested
 * output_format. Returns 0 on success; fatal errors exit via
 * exitmessage().
 */
int convert_one_image(const char *infilename, const char *outfilename, int output_format, const char *dcfilename, const char *acfilename)
{
FILE *fp, *fp_dc, *fp_ac;
unsigned int length_of_file;
unsigned int width, height;
unsigned char *buf;
struct jdec_private *jdec;
unsigned char *components[3];
/* Load the Jpeg into memory */
fp = fopen(infilename, "rb");
if (fp == NULL)
exitmessage("Cannot open filename\n");
fp_dc = fopen(dcfilename, "wb");
if (fp_dc == NULL)
exitmessage("Cannot open DC output file\n");
fp_ac = fopen(acfilename, "wb");
if (fp_ac == NULL)
exitmessage("Cannot open AC output file\n");
length_of_file = filesize(fp); /* length of the whole compressed stream */
buf = (unsigned char *)malloc(length_of_file + 4);
if (buf == NULL)
exitmessage("Not enough memory for loading file\n");
if (fread(buf, length_of_file, 1, fp) != 1)
exitmessage("Cannot read input file\n");
fclose(fp);
/* Decompress it */
jdec = tinyjpeg_init(); /* allocate + initialize the decoder state only */
if (jdec == NULL)
exitmessage("Not enough memory to alloc the structure need for decompressing\n");
/* Parse the headers; the results are stored inside jdec. */
if (tinyjpeg_parse_header(jdec, buf, length_of_file) < 0)
exitmessage(tinyjpeg_get_errorstring(jdec));
/* Get the size of the image */
tinyjpeg_get_size(jdec, &width, &height);
snprintf(error_string, sizeof(error_string), "Decoding JPEG image...\n");
if (tinyjpeg_decode(jdec, output_format, fp_dc, fp_ac) < 0)
exitmessage(tinyjpeg_get_errorstring(jdec));
/*
 * Get address for each plane (not only max 3 planes is supported), and
 * depending of the output mode, only some components will be filled
 * RGB: 1 plane, YUV420P: 3 planes, GREY: 1 plane
 */
tinyjpeg_get_components(jdec, components);
/* Save it */
switch (output_format)
{
case TINYJPEG_FMT_RGB24:
case TINYJPEG_FMT_BGR24:
write_tga(outfilename, output_format, width, height, components);
break;
case TINYJPEG_FMT_YUV420P:
/* write_yuv emits per-plane files plus a complete .yuv file */
write_yuv(outfilename, width, height, components);
break;
case TINYJPEG_FMT_GREY:
write_pgm(outfilename, width, height, components);
break;
}
/* Only call this if the buffers were allocated by tinyjpeg_decode() */
tinyjpeg_free(jdec);
/* else call just free(jdec); */
free(buf);
return 0;
}
convert_one_image函数中的核心函数:tinyjpeg_decode函数
int tinyjpeg_decode(struct jdec_private *priv, int pixfmt,FILE *fp_dc,FILE *fp_ac)//解码流函数
{
unsigned int x, y, xstride_by_mcu, ystride_by_mcu;
unsigned int bytes_per_blocklines[3], bytes_per_mcu[3];
unsigned char *dcbuffer,*acbuffer,*dcbuffer_write,*acbuffer_write,dcbuffer_del,acbuffer_del;
int i;
decode_MCU_fct decode_MCU;
const decode_MCU_fct decode_mcu_table;
const convert_colorspace_fct colorspace_array_conv;
convert_colorspace_fct convert_to_pixfmt;
/dcbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->heightpriv->width/64)3/2));
acbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->heightpriv->width/64)3/2));
dcbuffer_write=dcbuffer;
acbuffer_write=acbuffer;
dcbuffer_del=dcbuffer;
acbuffer_del=acbuffer;/
if (setjmp(priv->jump_state))
return -1;
/* To keep gcc happy initialize some array */
bytes_per_mcu[1] = 0;
bytes_per_mcu[2] = 0;
bytes_per_blocklines[1] = 0;
bytes_per_blocklines[2] = 0;
decode_mcu_table = decode_mcu_3comp_table;
switch (pixfmt) {
case TINYJPEG_FMT_YUV420P:
colorspace_array_conv = convert_colorspace_yuv420p;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height);
if (priv->components[1] == NULL)
priv->components[1] = (uint8_t *)malloc(priv->width * priv->height/4);
if (priv->components[2] == NULL)
priv->components[2] = (uint8_t *)malloc(priv->width * priv->height/4);
bytes_per_blocklines[0] = priv->width;
bytes_per_blocklines[1] = priv->width/4;
bytes_per_blocklines[2] = priv->width/4;
bytes_per_mcu[0] = 8;
bytes_per_mcu[1] = 4;
bytes_per_mcu[2] = 4;
break;
case TINYJPEG_FMT_RGB24:
colorspace_array_conv = convert_colorspace_rgb24;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height * 3);
bytes_per_blocklines[0] = priv->width * 3;
bytes_per_mcu[0] = 3*8;
break;
case TINYJPEG_FMT_BGR24:
colorspace_array_conv = convert_colorspace_bgr24;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height * 3);
bytes_per_blocklines[0] = priv->width * 3;
bytes_per_mcu[0] = 3*8;
break;
case TINYJPEG_FMT_GREY:
decode_mcu_table = decode_mcu_1comp_table;
colorspace_array_conv = convert_colorspace_grey;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height);
bytes_per_blocklines[0] = priv->width;
bytes_per_mcu[0] = 8;
break;
default:
#if TRACE
fprintf(p_trace,“Bad pixel format\n”);
fflush(p_trace);
#endif
return -1;
}
xstride_by_mcu = ystride_by_mcu = 8;
if ((priv->component_infos[cY].Hfactor | priv->component_infos[cY].Vfactor) == 1) {
decode_MCU = decode_mcu_table[0];
convert_to_pixfmt = colorspace_array_conv[0];
#if TRACE
fprintf(p_trace,“Use decode 1x1 sampling\n”);
fflush(p_trace);
#endif
} else if (priv->component_infos[cY].Hfactor == 1) {
decode_MCU = decode_mcu_table[1];
convert_to_pixfmt = colorspace_array_conv[1];
ystride_by_mcu = 16;
#if TRACE
fprintf(p_trace,“Use decode 1x2 sampling (not supported)\n”);
fflush(p_trace);
#endif
} else if (priv->component_infos[cY].Vfactor == 2) {
decode_MCU = decode_mcu_table[3];
convert_to_pixfmt = colorspace_array_conv[3];
xstride_by_mcu = 16;
ystride_by_mcu = 16;
#if TRACE
fprintf(p_trace,“Use decode 2x2 sampling\n”);
fflush(p_trace);
#endif
} else {
decode_MCU = decode_mcu_table[2];
convert_to_pixfmt = colorspace_array_conv[2];
xstride_by_mcu = 16;
#if TRACE
fprintf(p_trace,“Use decode 2x1 sampling\n”);
fflush(p_trace);
#endif
}
//for(i=0;i<(priv->height*priv->width/64);i++)
//{
// *dcbuffer=(unsigned char)((priv->component_infos[cY].DCT[0]+512)/4);//限制取值,防止超出最大范围
// acbuffer=(unsigned char)(priv->component_infos[cY].DCT[1]+128);
// dcbuffer++;
// acbuffer++;
//}
//for(i=0;i<((priv->heightpriv->width/64)/2);i++)
//{
// *dcbuffer=(unsigned char)0;
// acbuffer=(unsigned char)0;
// dcbuffer++;
// acbuffer++;
//}
//fwrite(dcbuffer_write,sizeof(unsigned char),(priv->heightpriv->width/64)3/2,fp_dc);
//fwrite(acbuffer_write,sizeof(unsigned char),(priv->heightpriv->width/64)*3/2,fp_ac);
//fclose(fp_dc);
//fclose(fp_ac);
//free(dcbuffer_del);
//free(acbuffer_del);
resync(priv);
/* Don’t forget to that block can be either 8 or 16 lines */
bytes_per_blocklines[0] *= ystride_by_mcu;
bytes_per_blocklines[1] *= ystride_by_mcu;
bytes_per_blocklines[2] *= ystride_by_mcu;
bytes_per_mcu[0] *= xstride_by_mcu/8;
bytes_per_mcu[1] *= xstride_by_mcu/8;
bytes_per_mcu[2] *= xstride_by_mcu/8;
/* Just the decode the image by macroblock (size is 8x8, 8x16, or 16x16) /
dcbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)3/2));
acbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)*3/2));
dcbuffer_write=dcbuffer;
acbuffer_write=acbuffer;
dcbuffer_del=dcbuffer;
acbuffer_del=acbuffer;
for (y=0; y < priv->height/ystride_by_mcu; y++)
{
//trace(“Decoding row %d\n”, y);
priv->plane[0] = priv->components[0] + (y * bytes_per_blocklines[0]);
priv->plane[1] = priv->components[1] + (y * bytes_per_blocklines[1]);
priv->plane[2] = priv->components[2] + (y * bytes_per_blocklines[2]);
for (x=0; x < priv->width; x+=xstride_by_mcu)
{
decode_MCU(priv);
convert_to_pixfmt(priv);
priv->plane[0] += bytes_per_mcu[0];
priv->plane[1] += bytes_per_mcu[1];
priv->plane[2] += bytes_per_mcu[2];
if (priv->restarts_to_go>0)
{
priv->restarts_to_go–;
if (priv->restarts_to_go == 0)
{
priv->stream -= (priv->nbits_in_reservoir/8);
resync(priv);
if (find_next_rst_marker(priv) < 0)
return -1;
}
}
*dcbuffer=(unsigned char)((priv->component_infos[cY].DCT[0]+512)/4);//限制取值,防止超出最大范围
acbuffer=(unsigned char)(priv->component_infos[cY].DCT[1]+128);
dcbuffer++;
acbuffer++;
}
}
for(i=0;i<((priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)/2);i++)
{
*dcbuffer=(unsigned char)128;
acbuffer=(unsigned char)128;
dcbuffer++;
acbuffer++;
}
fwrite(dcbuffer_write,sizeof(unsigned char),(priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)3/2,fp_dc);
fwrite(acbuffer_write,sizeof(unsigned char),(priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)*3/2,fp_ac);
fclose(fp_dc);
fclose(fp_ac);
free(dcbuffer_del);
free(acbuffer_del);
#if TRACE
fprintf(p_trace,“Input file size: %d\n”, priv->stream_length+2);
fprintf(p_trace,“Input bytes actually read: %d\n”, priv->stream - priv->stream_begin + 2);
fflush(p_trace);
#endif
return 0;
}
5.用TXT文件输出
/*
 * Build one de-quantization table: de-zigzag ref_table and pre-multiply
 * it by the AA&N IDCT scale factors, storing the result into qtable.
 * When TRACE is enabled, also dumps the raw (unscaled) table to the
 * trace file.
 */
static void build_quantization_table(float *qtable, const unsigned char *ref_table)
{
/* Taken from libjpeg. Copyright Independent JPEG Group's LLM idct.
 * For float AA&N IDCT method, divisors are equal to quantization
 * coefficients scaled by scalefactor[row]*scalefactor[col], where
 *   scalefactor[0] = 1
 *   scalefactor[k] = cos(k*PI/16) * sqrt(2)    for k=1..7
 * We apply a further scale factor of 8.
 * What's actually stored is 1/divisor so that the inner loop can
 * use a multiplication rather than a division.
 */
int i, j;
int m, n; /* only used by the TRACE dump below */
static const double aanscalefactor[8] = {
1.0, 1.387039845, 1.306562965, 1.175875602,
1.0, 0.785694958, 0.541196100, 0.275899379
};
const unsigned char *zz = zigzag, *zz1 = zigzag;
for (i = 0; i < 8; i++) {
for (j = 0; j < 8; j++) {
*qtable++ = ref_table[*zz++] * aanscalefactor[i] * aanscalefactor[j];
}
}
/* Dump the original quantization table (before multiplication by the
 * scale factors) so the quality factor can be studied from the TXT. */
#if TRACE
for (m = 0; m < 8; m++)
{
for (n = 0; n < 8; n++)
{
fprintf(p_trace, "%d ", ref_table[*zz1++]);
}
fprintf(p_trace, "\n");
}
#endif
}
static int parse_DQT(struct jdec_private *priv, const unsigned char *stream)
{
int qi;
float *table;
const unsigned char dqt_block_end;
#if TRACE
fprintf(p_trace,"> DQT marker\n");
fflush(p_trace);
#endif
dqt_block_end = stream + be16_to_cpu(stream);//DQT的长度用两个字节存储,故be_to_cpu(stream)来获取DQT的长度
stream += 2; / Skip length */
while (stream < dqt_block_end)
{
qi = *stream++;
#if SANITY_CHECK
if (qi>>4)//右移判断是否为8bit量化,若不是则右移后为1,执行下列语句
snprintf(error_string, sizeof(error_string),“16 bits quantization table is not supported\n”);
if (qi>4)
snprintf(error_string, sizeof(error_string),“No more 4 quantization table is supported (got %d)\n”, qi);
#endif
table = priv->Q_tables[qi];
build_quantization_table(table, stream);//将DQT表存储到了priv结构体中
stream += 64;
}
#if TRACE
fprintf(p_trace,"< DQT marker\n");
fflush(p_trace);
#endif
return 0;
}
6.输出DC、AC
int tinyjpeg_decode(struct jdec_private *priv, int pixfmt,FILE *fp_dc,FILE *fp_ac)//解码流函数
{
unsigned int x, y, xstride_by_mcu, ystride_by_mcu;
unsigned int bytes_per_blocklines[3], bytes_per_mcu[3];
unsigned char *dcbuffer,*acbuffer,*dcbuffer_write,*acbuffer_write,dcbuffer_del,acbuffer_del;
int i;
decode_MCU_fct decode_MCU;
const decode_MCU_fct decode_mcu_table;
const convert_colorspace_fct colorspace_array_conv;
convert_colorspace_fct convert_to_pixfmt;
/dcbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->heightpriv->width/64)3/2));
acbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->heightpriv->width/64)3/2));
dcbuffer_write=dcbuffer;
acbuffer_write=acbuffer;
dcbuffer_del=dcbuffer;
acbuffer_del=acbuffer;/
if (setjmp(priv->jump_state))
return -1;
/* To keep gcc happy initialize some array */
bytes_per_mcu[1] = 0;
bytes_per_mcu[2] = 0;
bytes_per_blocklines[1] = 0;
bytes_per_blocklines[2] = 0;
decode_mcu_table = decode_mcu_3comp_table;
switch (pixfmt) {
case TINYJPEG_FMT_YUV420P:
colorspace_array_conv = convert_colorspace_yuv420p;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height);
if (priv->components[1] == NULL)
priv->components[1] = (uint8_t *)malloc(priv->width * priv->height/4);
if (priv->components[2] == NULL)
priv->components[2] = (uint8_t *)malloc(priv->width * priv->height/4);
bytes_per_blocklines[0] = priv->width;
bytes_per_blocklines[1] = priv->width/4;
bytes_per_blocklines[2] = priv->width/4;
bytes_per_mcu[0] = 8;
bytes_per_mcu[1] = 4;
bytes_per_mcu[2] = 4;
break;
case TINYJPEG_FMT_RGB24:
colorspace_array_conv = convert_colorspace_rgb24;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height * 3);
bytes_per_blocklines[0] = priv->width * 3;
bytes_per_mcu[0] = 3*8;
break;
case TINYJPEG_FMT_BGR24:
colorspace_array_conv = convert_colorspace_bgr24;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height * 3);
bytes_per_blocklines[0] = priv->width * 3;
bytes_per_mcu[0] = 3*8;
break;
case TINYJPEG_FMT_GREY:
decode_mcu_table = decode_mcu_1comp_table;
colorspace_array_conv = convert_colorspace_grey;
if (priv->components[0] == NULL)
priv->components[0] = (uint8_t *)malloc(priv->width * priv->height);
bytes_per_blocklines[0] = priv->width;
bytes_per_mcu[0] = 8;
break;
default:
#if TRACE
fprintf(p_trace,“Bad pixel format\n”);
fflush(p_trace);
#endif
return -1;
}
xstride_by_mcu = ystride_by_mcu = 8;
if ((priv->component_infos[cY].Hfactor | priv->component_infos[cY].Vfactor) == 1) {
decode_MCU = decode_mcu_table[0];
convert_to_pixfmt = colorspace_array_conv[0];
#if TRACE
fprintf(p_trace,“Use decode 1x1 sampling\n”);
fflush(p_trace);
#endif
} else if (priv->component_infos[cY].Hfactor == 1) {
decode_MCU = decode_mcu_table[1];
convert_to_pixfmt = colorspace_array_conv[1];
ystride_by_mcu = 16;
#if TRACE
fprintf(p_trace,“Use decode 1x2 sampling (not supported)\n”);
fflush(p_trace);
#endif
} else if (priv->component_infos[cY].Vfactor == 2) {
decode_MCU = decode_mcu_table[3];
convert_to_pixfmt = colorspace_array_conv[3];
xstride_by_mcu = 16;
ystride_by_mcu = 16;
#if TRACE
fprintf(p_trace,“Use decode 2x2 sampling\n”);
fflush(p_trace);
#endif
} else {
decode_MCU = decode_mcu_table[2];
convert_to_pixfmt = colorspace_array_conv[2];
xstride_by_mcu = 16;
#if TRACE
fprintf(p_trace,“Use decode 2x1 sampling\n”);
fflush(p_trace);
#endif
}
//for(i=0;i<(priv->height*priv->width/64);i++)
//{
// *dcbuffer=(unsigned char)((priv->component_infos[cY].DCT[0]+512)/4);//限制取值,防止超出最大范围
// acbuffer=(unsigned char)(priv->component_infos[cY].DCT[1]+128);
// dcbuffer++;
// acbuffer++;
//}
//for(i=0;i<((priv->heightpriv->width/64)/2);i++)
//{
// *dcbuffer=(unsigned char)0;
// acbuffer=(unsigned char)0;
// dcbuffer++;
// acbuffer++;
//}
//fwrite(dcbuffer_write,sizeof(unsigned char),(priv->heightpriv->width/64)3/2,fp_dc);
//fwrite(acbuffer_write,sizeof(unsigned char),(priv->heightpriv->width/64)*3/2,fp_ac);
//fclose(fp_dc);
//fclose(fp_ac);
//free(dcbuffer_del);
//free(acbuffer_del);
resync(priv);
/* Don’t forget to that block can be either 8 or 16 lines */
bytes_per_blocklines[0] *= ystride_by_mcu;
bytes_per_blocklines[1] *= ystride_by_mcu;
bytes_per_blocklines[2] *= ystride_by_mcu;
bytes_per_mcu[0] *= xstride_by_mcu/8;
bytes_per_mcu[1] *= xstride_by_mcu/8;
bytes_per_mcu[2] *= xstride_by_mcu/8;
/* Just the decode the image by macroblock (size is 8x8, 8x16, or 16x16) /
dcbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)3/2));
acbuffer=(unsigned char)malloc(sizeof(unsigned char)((priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)*3/2));
dcbuffer_write=dcbuffer;
acbuffer_write=acbuffer;
dcbuffer_del=dcbuffer;
acbuffer_del=acbuffer;
for (y=0; y < priv->height/ystride_by_mcu; y++)
{
//trace(“Decoding row %d\n”, y);
priv->plane[0] = priv->components[0] + (y * bytes_per_blocklines[0]);
priv->plane[1] = priv->components[1] + (y * bytes_per_blocklines[1]);
priv->plane[2] = priv->components[2] + (y * bytes_per_blocklines[2]);
for (x=0; x < priv->width; x+=xstride_by_mcu)
{
decode_MCU(priv);
convert_to_pixfmt(priv);
priv->plane[0] += bytes_per_mcu[0];
priv->plane[1] += bytes_per_mcu[1];
priv->plane[2] += bytes_per_mcu[2];
if (priv->restarts_to_go>0)
{
priv->restarts_to_go–;
if (priv->restarts_to_go == 0)
{
priv->stream -= (priv->nbits_in_reservoir/8);
resync(priv);
if (find_next_rst_marker(priv) < 0)
return -1;
}
}
*dcbuffer=(unsigned char)((priv->component_infos[cY].DCT[0]+512)/4);//限制取值,防止超出最大范围
acbuffer=(unsigned char)(priv->component_infos[cY].DCT[1]+128);
dcbuffer++;
acbuffer++;
}
}
for(i=0;i<((priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)/2);i++)
{
*dcbuffer=(unsigned char)128;
acbuffer=(unsigned char)128;
dcbuffer++;
acbuffer++;
}
fwrite(dcbuffer_write,sizeof(unsigned char),(priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)3/2,fp_dc);
fwrite(acbuffer_write,sizeof(unsigned char),(priv->height/ystride_by_mcu)(priv->width/xstride_by_mcu)*3/2,fp_ac);
fclose(fp_dc);
fclose(fp_ac);
free(dcbuffer_del);
free(acbuffer_del);
#if TRACE
fprintf(p_trace,“Input file size: %d\n”, priv->stream_length+2);
fprintf(p_trace,“Input bytes actually read: %d\n”, priv->stream - priv->stream_begin + 2);
fflush(p_trace);
#endif
return 0;
}
实验结果
1.输出的YUV图像
2.TXT形式输出量化表
3.输出DC、AC
实验结论:
DCT变换能使能量集中,去除高频分量,将有记忆信源变为无记忆信源。