Analysis of the STM32 Hardware JPEG Encoding Process

Introduction

Sometimes, when we need to capture an image for storage or transmission, size constraints force us to compress it. The mature approach is JPEG compression, and ready-made libraries such as jpeglib exist, but software compression on a microcontroller is extremely slow: in a test on an STM32F429, compressing a 1920x1020 RGB888 image took roughly 6 seconds. Using the hardware JPEG codec reduces the compression time dramatically.

Here we implement JPEG compression on an STM32H7. The code is based on the official ST example; in addition to ST's DMA approach, a blocking (polling) approach is added to make the code easier to follow.

Implementation

The HAL library provided by ST makes the JPEG peripheral quite easy to use; there is a single encoding function:

HAL_StatusTypeDef  HAL_JPEG_Encode_DMA(JPEG_HandleTypeDef *hjpeg, uint8_t *pDataInMCU, uint32_t InDataLength,uint8_t *pDataOut, uint32_t OutDataLength)

If you prefer the blocking approach:

HAL_StatusTypeDef  HAL_JPEG_Encode(JPEG_HandleTypeDef *hjpeg, uint8_t *pDataInMCU, uint32_t InDataLength,uint8_t *pDataOut, uint32_t OutDataLength, uint32_t Timeout)

Note that the input parameter pDataInMCU is not the raw image data but an array of MCUs (Minimum Coded Units), because the JPEG hardware expects its input as YCbCr-based processing units. The image data therefore has to be pre-processed before calling this function. Fortunately, ST already provides the conversion code; the files can be found in the ST HAL firmware package, in my case at:

STM32Cube_FW_H7_V1.7.0\Utilities\JPEG

Alternatively, generate the JPEG code directly with STM32Cube; Cube automatically copies the files from the folder above into the project and enables the relevant macros.

The folder contains three files:

jpeg_utils.c, jpeg_utils.h, jpeg_utils_conf.h

We only need to adjust the macro definitions in jpeg_utils_conf.h:

/* RGB Color format definition for JPEG encoding/Decoding : Should not be modified*/
#define JPEG_ARGB8888            0  /* ARGB8888 Color Format */
#define JPEG_RGB888              1  /* RGB888 Color Format   */
#define JPEG_RGB565              2  /* RGB565 Color Format   */

/*
 * Define USE_JPEG_DECODER
 */

#define USE_JPEG_DECODER 0 /* 1 or 0 */
/*
 * Define USE_JPEG_ENCODER
 */

#define USE_JPEG_ENCODER 1 /* 1 or 0 ********* Value different from default value : 1 ********** */

/*
 * Define JPEG_RGB_FORMAT
 */
#define JPEG_RGB_FORMAT JPEG_RGB565 /* JPEG_ARGB8888, JPEG_RGB888, JPEG_RGB565 ********* Value different from default value : 0 ********** */

/*
 * Define JPEG_SWAP_RG
 */
#define JPEG_SWAP_RG 0 /* 0 or 1 ********* Value different from default value : 0 ********** */

The main thing is to specify the input image format (JPEG_RGB_FORMAT); if the code is generated by Cube there is nothing to change by hand.
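
To get a feel for what these helpers provide before diving into the full code, here is a minimal sketch (the function name and the 320x240 RGB565 frame are my own assumptions, not part of the ST example): it fetches the conversion function and the total MCU count, then converts one chunk of RGB data into MCU-ordered YCbCr. With 4:2:0 subsampling each MCU covers a 16x16-pixel block, so a 320x240 image yields (320/16) x (240/16) = 300 MCUs, which is also why the input chunks in the DMA code below are multiples of 16 lines.

#include "jpeg_utils.h"

/* Sketch only: query the pre-processing helpers for a 320x240 RGB565 frame.
 * The caller supplies the buffers; sizing them is up to the application. */
static JPEG_RGBToYCbCr_Convert_Function rgb2ycbcr;
static uint32_t mcu_total;

void jpeg_preproc_sketch(uint8_t *rgb565_chunk, uint8_t *mcu_buffer, uint32_t chunk_bytes)
{
    JPEG_ConfTypeDef info = {0};
    uint32_t converted_bytes = 0;

    info.ImageWidth        = 320;
    info.ImageHeight       = 240;
    info.ColorSpace        = JPEG_YCBCR_COLORSPACE;
    info.ChromaSubsampling = JPEG_420_SUBSAMPLING;   /* 16x16-pixel MCUs */
    info.ImageQuality      = 90;

    JPEG_InitColorTables();                                        /* build the lookup tables (needed once) */
    JPEG_GetEncodeColorConvertFunc(&info, &rgb2ycbcr, &mcu_total); /* mcu_total = 300 for 320x240, 4:2:0 */

    /* Convert one chunk (a multiple of 16 lines) of RGB565 into MCU-ordered YCbCr.
     * The return value is the number of MCUs produced; converted_bytes receives the
     * number of bytes written to mcu_buffer, which is what the encoder consumes. */
    rgb2ycbcr(rgb565_chunk, mcu_buffer, 0, chunk_bytes, &converted_bytes);
}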

DMA Method

The DMA approach follows ST's example. The input image is in RGB565 format and stored in an array; the program takes a slice of the image from the array, compresses it, stores the resulting chunk of compressed data, and repeats, moving the image piece by piece ("ant moving house" style) until the whole picture has been compressed. The code below is based on the RT-Thread operating system and writes the output image to the file system; the comments explain what the main functions do:

#include "jpeg_utils.h"
#include "jpeg.h"
#include "dfs_posix.h"

#define JPEG_BUFFER_EMPTY       0
#define JPEG_BUFFER_FULL        1

#define MAX_INPUT_LINES         16
#define BYTES_PER_PIXEL         2

#define CHUNK_SIZE_IN   ((rt_uint32_t)(320 * BYTES_PER_PIXEL * MAX_INPUT_LINES)) 
#define CHUNK_SIZE_OUT  ((rt_uint32_t) (1024 * 8))

typedef struct
{
    rt_uint8_t State;  
    rt_uint8_t *DataBuffer;
    rt_uint32_t DataBufferSize;
}JPEG_Data_BufferTypeDef;

static rt_uint8_t MCU_Data_InBuffer[CHUNK_SIZE_IN];
static rt_uint8_t JPEG_Data_OutBuffer[CHUNK_SIZE_OUT];

static JPEG_Data_BufferTypeDef Jpeg_OUT_BufferTypeDef = {JPEG_BUFFER_EMPTY , JPEG_Data_OutBuffer , 0};
static JPEG_Data_BufferTypeDef Jpeg_IN_BufferTypeDef = {JPEG_BUFFER_EMPTY , MCU_Data_InBuffer, 0};

static rt_uint32_t MCU_TotalNb                  = 0;//Total number of MCU blocks
static rt_uint32_t MCU_BlockIndex               = 0;
static rt_uint32_t Jpeg_HWEncodingEnd           = 0;
static rt_uint32_t Output_Is_Paused             = 0;
static rt_uint32_t Input_Is_Paused              = 0;

static rt_uint32_t RGB_InputImageIndex;
static rt_uint32_t RGB_InputImageSize_Bytes;
static rt_uint32_t RGB_InputImageAddress;

static JPEG_RGBToYCbCr_Convert_Function pRGBToYCbCr_Convert_Function;

static int fd;
static char file_name[16] = "";

rt_uint32_t JPEG_Encode_DMA(JPEG_HandleTypeDef *hjpeg, JPEG_ConfTypeDef *pInfo, uint32_t RGBImageBufferAddress, uint32_t RGBImageSize_Bytes)
{
    uint32_t DataBufferSize = 0;

    /* Reset all Global variables */
    MCU_TotalNb                = 0;
    MCU_BlockIndex             = 0;
    Jpeg_HWEncodingEnd         = 0;
    Output_Is_Paused           = 0;
    Input_Is_Paused            = 0;

    /* Get the number of MCU (Minimum Coded Unit) blocks and the matching color-conversion function */
    JPEG_GetEncodeColorConvertFunc(pInfo, &pRGBToYCbCr_Convert_Function, &MCU_TotalNb);

    /* Clear Output Buffer */
    Jpeg_OUT_BufferTypeDef.DataBufferSize = 0;
    Jpeg_OUT_BufferTypeDef.State = JPEG_BUFFER_EMPTY; 

    /* Fill input Buffers */  
    RGB_InputImageIndex = 0;
    RGB_InputImageAddress = RGBImageBufferAddress;
    RGB_InputImageSize_Bytes = RGBImageSize_Bytes;
    DataBufferSize= pInfo->ImageWidth * MAX_INPUT_LINES * BYTES_PER_PIXEL;

    if(RGB_InputImageIndex < RGB_InputImageSize_Bytes)
    {
        /* Pre-Processing : Convert RGB image to YCbCr blocks*/
        MCU_BlockIndex += pRGBToYCbCr_Convert_Function((rt_uint8_t *)(RGB_InputImageAddress + RGB_InputImageIndex), Jpeg_IN_BufferTypeDef.DataBuffer, 0, DataBufferSize,(rt_uint32_t*)(&Jpeg_IN_BufferTypeDef.DataBufferSize));
        Jpeg_IN_BufferTypeDef.State = JPEG_BUFFER_FULL;
        
        RGB_InputImageIndex += DataBufferSize;
    }

    /* Fill Encoding Params */
    HAL_JPEG_ConfigEncoding(hjpeg, pInfo);

    /* Result will be written to this file */
    fd = open(file_name, O_WRONLY | O_CREAT | O_TRUNC);
    if(fd < 0)
    {
        rt_kprintf("create jpg file failed\r\n");
        return 1;
    }

    /* Start JPEG encoding with DMA method */
    HAL_JPEG_Encode_DMA(hjpeg ,Jpeg_IN_BufferTypeDef.DataBuffer ,Jpeg_IN_BufferTypeDef.DataBufferSize ,Jpeg_OUT_BufferTypeDef.DataBuffer ,CHUNK_SIZE_OUT);

    return 0;
}

/**
 * @brief This function handles the output buffer
 * @param hjpeg pointer to JPEG_HandleTypeDef
 * @retval 1:Converting is complete, 0:Converting will continue
*/
rt_uint32_t JPEG_EncodeOutputHandler(JPEG_HandleTypeDef *hjpeg)
{
    rt_uint32_t bytesWritefile = 0;

    /* Output buffer is full */
    if(Jpeg_OUT_BufferTypeDef.State == JPEG_BUFFER_FULL)
    {  
        /* Write to file */
        write(fd, Jpeg_OUT_BufferTypeDef.DataBuffer ,Jpeg_OUT_BufferTypeDef.DataBufferSize);
        
        /* Reset output buffer */
        Jpeg_OUT_BufferTypeDef.State = JPEG_BUFFER_EMPTY;
        Jpeg_OUT_BufferTypeDef.DataBufferSize = 0;
        
        /* Close the file if encoding has finished */
        if(Jpeg_HWEncodingEnd != 0)
        {
            close(fd);
            return 1;
        }
        /* Otherwise resume receiving data from the encoder */
        else if((Output_Is_Paused == 1) && (Jpeg_OUT_BufferTypeDef.State == JPEG_BUFFER_EMPTY))
        {
            Output_Is_Paused = 0;
            HAL_JPEG_Resume(hjpeg, JPEG_PAUSE_RESUME_OUTPUT);            
        }
    }

    return 0;  
}

/**
 * @brief This function handles the input buffer
 * @param hjpeg: pointer to JPEG_HandleTypeDef
 * @retval None
*/
void JPEG_EncodeInputHandler(JPEG_HandleTypeDef *hjpeg)
{
    rt_uint32_t DataBufferSize = CHUNK_SIZE_IN;

    if((Jpeg_IN_BufferTypeDef.State == JPEG_BUFFER_EMPTY) && (MCU_BlockIndex <= MCU_TotalNb))  
    {
        /* Read and reorder lines from RGB input and fill data buffer */
        if(RGB_InputImageIndex < RGB_InputImageSize_Bytes)
        {
            /* Pre-Processing */
            MCU_BlockIndex += pRGBToYCbCr_Convert_Function((uint8_t *)(RGB_InputImageAddress + RGB_InputImageIndex), Jpeg_IN_BufferTypeDef.DataBuffer, 0, DataBufferSize, (rt_uint32_t*)(&Jpeg_IN_BufferTypeDef.DataBufferSize));
            Jpeg_IN_BufferTypeDef.State = JPEG_BUFFER_FULL;
            RGB_InputImageIndex += DataBufferSize;

            if(Input_Is_Paused == 1)
            {
                Input_Is_Paused = 0;
                HAL_JPEG_ConfigInputBuffer(hjpeg,Jpeg_IN_BufferTypeDef.DataBuffer, Jpeg_IN_BufferTypeDef.DataBufferSize);    

                HAL_JPEG_Resume(hjpeg, JPEG_PAUSE_RESUME_INPUT); 
            }
        }
        else
        {
            MCU_BlockIndex++;
        }
    }
}

/**
 * @brief This function is called when the current input chunk has been consumed by the JPEG peripheral
 *              and a new data chunk is requested
 * @param hjpeg: pointer to a JPEG_HandleTypeDef
 * @param NbEncodedData: number of bytes consumed from the previous chunk
 * @retval None
*/
void HAL_JPEG_GetDataCallback(JPEG_HandleTypeDef *hjpeg, uint32_t NbEncodedData)
{
    if(NbEncodedData == Jpeg_IN_BufferTypeDef.DataBufferSize)
    {  
        Jpeg_IN_BufferTypeDef.State = JPEG_BUFFER_EMPTY;
        Jpeg_IN_BufferTypeDef.DataBufferSize = 0;

        HAL_JPEG_Pause(hjpeg, JPEG_PAUSE_RESUME_INPUT);
        Input_Is_Paused = 1;
    }
    else
    {
        HAL_JPEG_ConfigInputBuffer(hjpeg,Jpeg_IN_BufferTypeDef.DataBuffer + NbEncodedData, Jpeg_IN_BufferTypeDef.DataBufferSize - NbEncodedData);      
    }
}

/**
 * @brief This function will be called when the encoded data is ready
 * @param hjpeg: pointer to JPEG_HandleTypeDef
 * @param pDataOut: pointer to the output data buffer
 * @param OutDataLength: number in bytes of data available in the specified output buffer
 * @retval None
*/
void HAL_JPEG_DataReadyCallback (JPEG_HandleTypeDef *hjpeg, uint8_t *pDataOut, uint32_t OutDataLength)
{
    Jpeg_OUT_BufferTypeDef.State = JPEG_BUFFER_FULL;
    Jpeg_OUT_BufferTypeDef.DataBufferSize = OutDataLength;

    HAL_JPEG_Pause(hjpeg, JPEG_PAUSE_RESUME_OUTPUT);
    Output_Is_Paused = 1;

    HAL_JPEG_ConfigOutputBuffer(hjpeg, Jpeg_OUT_BufferTypeDef.DataBuffer, CHUNK_SIZE_OUT); 
}

/**
 * @brief This function is called when a JPEG error occurs
 *          The application can call the function HAL_JPEG_GetError() to retrieve the error codes
 * @param hjpeg: pointer to JPEG_HandleTypeDef
 * @retval None
*/
void HAL_JPEG_ErrorCallback(JPEG_HandleTypeDef *hjpeg)
{
    rt_kprintf("JPEG ERROR!\r\n");
}

/**
 * @brief This function will be called when the encoding is completed
 * @param hjpeg: pointer to JPEG_HandleTypeDef
 * @retval None
*/
void HAL_JPEG_EncodeCpltCallback(JPEG_HandleTypeDef *hjpeg)
{    
    Jpeg_HWEncodingEnd = 1;
}

extern JPEG_HandleTypeDef  JPEG_Handle;
void JPEG_IRQHandler(void)
{
    HAL_JPEG_IRQHandler(&JPEG_Handle);
}

void MDMA_IRQHandler(void)
{
    /* Check the interrupt and clear flag */
    HAL_MDMA_IRQHandler(JPEG_Handle.hdmain);
    HAL_MDMA_IRQHandler(JPEG_Handle.hdmaout);  
}

static void jpeg(int argc, char *argv[])
{
    if(argc != 3)
    {
        rt_kprintf("we need 2 parameters");
        return ;
    }
    strcpy(file_name, argv[1]);
    strcat(file_name,".jpg");

    extern JPEG_ConfTypeDef jpeg_info;

    jpeg_info.ImageQuality = atoi(argv[2]);

    extern rt_sem_t jpeg_sem;

    rt_sem_release(jpeg_sem);

}
MSH_CMD_EXPORT(jpeg,jpeg ops);
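
With the command exported to msh, a hypothetical console session could look like this (the released semaphore wakes the encoding loop in main(), shown further below, and the result lands in the file system):

msh /> jpeg pic 90

This would encode Image_RGB565 with quality 90 and store it as pic.jpg.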

Since DMA is used, the MDMA must be initialized; the initialization code is as follows:

void HAL_JPEG_MspInit(JPEG_HandleTypeDef* hjpeg)
{
  if(hjpeg->Instance==JPEG)
  {
  /* USER CODE BEGIN JPEG_MspInit 0 */
    static MDMA_HandleTypeDef hmdma_jpeg_infifo_th;
    static MDMA_HandleTypeDef hmdma_jpeg_outfifo_th;
    __HAL_RCC_MDMA_CLK_ENABLE();
    __HAL_RCC_JPGDECEN_CLK_ENABLE();
  /* USER CODE END JPEG_MspInit 0 */
    /* Peripheral clock enable */
    __HAL_RCC_JPEG_CLK_ENABLE();

    /* JPEG MDMA Init */
    /* JPEG_INFIFO_TH Init */
    hmdma_jpeg_infifo_th.Instance = MDMA_Channel7;
    hmdma_jpeg_infifo_th.Init.Request = MDMA_REQUEST_JPEG_INFIFO_TH;
    hmdma_jpeg_infifo_th.Init.TransferTriggerMode = MDMA_BUFFER_TRANSFER;
    hmdma_jpeg_infifo_th.Init.Priority = MDMA_PRIORITY_LOW;
    hmdma_jpeg_infifo_th.Init.Endianness = MDMA_LITTLE_ENDIANNESS_PRESERVE;
    hmdma_jpeg_infifo_th.Init.SourceInc = MDMA_SRC_INC_BYTE;
    hmdma_jpeg_infifo_th.Init.DestinationInc = MDMA_DEST_INC_DISABLE;
    hmdma_jpeg_infifo_th.Init.SourceDataSize = MDMA_SRC_DATASIZE_BYTE;
    hmdma_jpeg_infifo_th.Init.DestDataSize = MDMA_DEST_DATASIZE_WORD;
    hmdma_jpeg_infifo_th.Init.DataAlignment = MDMA_DATAALIGN_PACKENABLE;
    hmdma_jpeg_infifo_th.Init.BufferTransferLength = 32;
    hmdma_jpeg_infifo_th.Init.SourceBurst = MDMA_SOURCE_BURST_32BEATS;
    hmdma_jpeg_infifo_th.Init.DestBurst = MDMA_DEST_BURST_16BEATS;
    hmdma_jpeg_infifo_th.Init.SourceBlockAddressOffset = 0;
    hmdma_jpeg_infifo_th.Init.DestBlockAddressOffset = 0;
    if (HAL_MDMA_Init(&hmdma_jpeg_infifo_th) != HAL_OK)
    {
      Error_Handler();
    }

    if (HAL_MDMA_ConfigPostRequestMask(&hmdma_jpeg_infifo_th, 0, 0) != HAL_OK)
    {
      Error_Handler();
    }

    __HAL_LINKDMA(hjpeg,hdmain,hmdma_jpeg_infifo_th);

    /* JPEG_OUTFIFO_TH Init */
    hmdma_jpeg_outfifo_th.Instance = MDMA_Channel6;
    hmdma_jpeg_outfifo_th.Init.Request = MDMA_REQUEST_JPEG_OUTFIFO_TH;
    hmdma_jpeg_outfifo_th.Init.TransferTriggerMode = MDMA_BUFFER_TRANSFER;
    hmdma_jpeg_outfifo_th.Init.Priority = MDMA_PRIORITY_LOW;
    hmdma_jpeg_outfifo_th.Init.Endianness = MDMA_LITTLE_ENDIANNESS_PRESERVE;
    hmdma_jpeg_outfifo_th.Init.SourceInc = MDMA_SRC_INC_DISABLE;
    hmdma_jpeg_outfifo_th.Init.DestinationInc = MDMA_DEST_INC_BYTE;
    hmdma_jpeg_outfifo_th.Init.SourceDataSize = MDMA_SRC_DATASIZE_WORD;
    hmdma_jpeg_outfifo_th.Init.DestDataSize = MDMA_DEST_DATASIZE_BYTE;
    hmdma_jpeg_outfifo_th.Init.DataAlignment = MDMA_DATAALIGN_PACKENABLE;
    hmdma_jpeg_outfifo_th.Init.BufferTransferLength = 32;
    hmdma_jpeg_outfifo_th.Init.SourceBurst = MDMA_SOURCE_BURST_32BEATS;
    hmdma_jpeg_outfifo_th.Init.DestBurst = MDMA_DEST_BURST_32BEATS;
    hmdma_jpeg_outfifo_th.Init.SourceBlockAddressOffset = 0;
    hmdma_jpeg_outfifo_th.Init.DestBlockAddressOffset = 0;
    if (HAL_MDMA_Init(&hmdma_jpeg_outfifo_th) != HAL_OK)
    {
      Error_Handler();
    }

    if (HAL_MDMA_ConfigPostRequestMask(&hmdma_jpeg_outfifo_th, 0, 0) != HAL_OK)
    {
      Error_Handler();
    }

    __HAL_LINKDMA(hjpeg,hdmaout,hmdma_jpeg_outfifo_th);

    /* JPEG interrupt Init */
    HAL_NVIC_SetPriority(JPEG_IRQn, 0, 0);
    HAL_NVIC_EnableIRQ(JPEG_IRQn);
  /* USER CODE BEGIN JPEG_MspInit 1 */
    HAL_NVIC_SetPriority(MDMA_IRQn, 0x0, 0x0);
    HAL_NVIC_EnableIRQ(MDMA_IRQn);
  /* USER CODE END JPEG_MspInit 1 */
  }
}

Note that both the input and the output data paths go through DMA, so two MDMA channels must be enabled. The main function is then fairly simple:

JPEG_HandleTypeDef  JPEG_Handle;
JPEG_ConfTypeDef    jpeg_info;

rt_sem_t jpeg_sem = RT_NULL;

int main(void)
{
    int count = 1;
    rt_uint32_t jpeg_encode_processing_end;

    jpeg_sem = rt_sem_create("jsem", 0, RT_IPC_FLAG_FIFO);
    if (jpeg_sem == RT_NULL)
    {
        rt_kprintf("create dynamic semaphore failed.\n");
        return -1;
    }

    jpeg_info.ImageWidth = 320;
    jpeg_info.ImageHeight = 240;
    jpeg_info.ChromaSubsampling = JPEG_420_SUBSAMPLING;
    jpeg_info.ColorSpace = JPEG_YCBCR_COLORSPACE;
    jpeg_info.ImageQuality = 100;

    JPEG_InitColorTables();
    JPEG_Handle.Instance = JPEG;
    HAL_JPEG_Init(&JPEG_Handle);

    while(1)
    {
        rt_sem_take(jpeg_sem, RT_WAITING_FOREVER);

        extern rt_uint32_t Image_RGB565[];
        JPEG_Encode_DMA(&JPEG_Handle, &jpeg_info, (rt_uint32_t)Image_RGB565, jpeg_info.ImageWidth * jpeg_info.ImageHeight * 2);

        do
        {
            JPEG_EncodeInputHandler(&JPEG_Handle);
            jpeg_encode_processing_end = JPEG_EncodeOutputHandler(&JPEG_Handle);

        }while(jpeg_encode_processing_end == 0);
    }
    return RT_EOK;
}

The image quality ranges from 1 to 100, with 100 giving the best quality.

Blocking Method

Once the DMA method is understood, the blocking method is quite straightforward:

JPEG_HandleTypeDef	JPEG_Handle;
JPEG_ConfTypeDef    jpeg_info;
JPEG_RGBToYCbCr_Convert_Function pRGBToYCbCr_Convert_Function;
uint32_t MCU_TotalNb=0;
uint32_t ConvertedDataCount=0;
uint32_t jpegOutSize=0; // Size of the generated JPEG file

void HJPEGEncode(uint8_t* srcBuf,uint8_t* tempYUVBuf,uint8_t* destBuf,uint32_t destBufLen, unsigned int quality)
{
    ConvertedDataCount=0;
    jpegOutSize=0;
    
    JPEG_InitColorTables();
    JPEG_Handle.Instance = JPEG;
    if(HAL_OK != HAL_JPEG_Init(&JPEG_Handle))
    {
    	rt_kprintf("MYHJPEG_Init:HAL_JPEG_Init error!!!\r\n");
    	return;
    }
        
    jpeg_info.ImageWidth = XLEN;
    jpeg_info.ImageHeight = YLEN;
    jpeg_info.ChromaSubsampling = JPEG_422_SUBSAMPLING;
    jpeg_info.ColorSpace = JPEG_YCBCR_COLORSPACE;
    jpeg_info.ImageQuality = quality;
        
    // Get the color-conversion function and the total number of MCUs from the encoding parameters
    if(HAL_OK!=JPEG_GetEncodeColorConvertFunc(&jpeg_info, &pRGBToYCbCr_Convert_Function, &MCU_TotalNb))
    {
    	rt_kprintf("MYHJPEG_Init:JPEG_GetEncodeColorConvertFunc error!!!\r\n");
    	return;
    }
        
    // Pre-processing: convert the RGB input to the required YCbCr format
    pRGBToYCbCr_Convert_Function(srcBuf, tempYUVBuf, 0, RGB888DATABYTELEN,&ConvertedDataCount);
        
    if(HAL_OK!=HAL_JPEG_ConfigEncoding(&JPEG_Handle, &jpeg_info))
    {
        rt_kprintf("doHJPEGEncode:HAL_JPEG_ConfigEncoding error!!!\r\n");
        return;
    }
        
    if(HAL_OK!=HAL_JPEG_Encode(&JPEG_Handle, tempYUVBuf, ConvertedDataCount,destBuf, destBufLen, MAXENCODETIME))
    {
        rt_kprintf("doHJPEGEncode:HAL_JPEG_Encode error!!!\r\n");
        return;
    }
}
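
An illustrative call could look like the sketch below; the buffer sizes, the assumption that XLEN/YLEN are 320x240, and the RGB888 input (to match RGB888DATABYTELEN above) are all mine, so adapt them to your project:

/* Sketch only: all sizes are assumptions for a 320x240 RGB888 source. */
static uint8_t rgb888_in[320 * 240 * 3];   /* raw input image */
static uint8_t yuv_temp[320 * 240 * 2];    /* 4:2:2 MCU data needs 2 bytes per pixel */
static uint8_t jpg_out[64 * 1024];         /* encoded output, sized generously */

void encode_once(void)
{
    HJPEGEncode(rgb888_in, yuv_temp, jpg_out, sizeof(jpg_out), 90);
    /* The encoded stream is now at the start of jpg_out. The jpegOutSize variable above is
     * not filled in by this code; one option is to override HAL_JPEG_DataReadyCallback()
     * and accumulate OutDataLength there. */
}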

Don't forget to enable the JPEG peripheral clock.
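
If you do not reuse the Cube-generated HAL_JPEG_MspInit shown earlier, a minimal MspInit for the blocking variant only needs to enable that clock (no MDMA channels or interrupts are required in polling mode). A sketch:

void HAL_JPEG_MspInit(JPEG_HandleTypeDef* hjpeg)
{
    if (hjpeg->Instance == JPEG)
    {
        __HAL_RCC_JPEG_CLK_ENABLE();   /* enable the JPEG peripheral clock */
    }
}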
