Saving a BMP Image from OpenGL (Byte Alignment and RGB Component Conversion)

http://blog.sina.com.cn/s/blog_60707c0f0100q9vf.html

Modern OpenGL can read pixels back in BGR order via GL_BGR_EXT, and the pack alignment already defaults to 4 bytes, so the manual handling in the code below is commented out. If your OpenGL implementation does not support GL_BGR_EXT, read with GL_RGB instead and restore the commented-out sections.
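Before relying on the BGR path, you can check at run time whether the driver actually advertises it. The sketch below is only a minimal check, assuming a legacy desktop OpenGL context where glGetString(GL_EXTENSIONS) is still available; the function name SupportsBgrReadback is illustrative, not an existing API:

#include <windows.h>
#include <GL/gl.h>
#include <cstring>

// Returns true if the EXT_bgra extension (which provides GL_BGR_EXT) is advertised.
// A rendering context must be current when this is called.
bool SupportsBgrReadback()
{
  const char* ext = reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS));
  return ext != nullptr && std::strstr(ext, "GL_EXT_bgra") != nullptr;
}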

BOOL SaveBMPFromOpenGl(LPCTSTR lpFileName)
{
  ASSERT(lpFileName != NULL);
  GLint viewport[4];
  glGetIntegerv(GL_VIEWPORT, viewport);
  CRect rect;
  GetClientRect(rect);   // not used below; the viewport size is used instead
  int width = viewport[2];
  int height = viewport[3];
  // glPixelStorei(GL_PACK_ALIGNMENT, 4);   // packing alignment used by glReadPixels; the default is already 4
  int nAlignWidth = (width*24+31)/32;   // DWORDs per row: 24-bit pixels rounded up to a 4-byte boundary
  unsigned char* pdata = new unsigned char[nAlignWidth * height * 4];   // aligned row bytes (nAlignWidth*4) times height
  memset(pdata, 0, nAlignWidth * height * 4);
  // GL_BGR_EXT returns pixels already in BMP's BGR order; use GL_RGB here
  // (and restore the swap loop below) if the extension is unavailable.
  glReadPixels(0, 0, width, height, GL_BGR_EXT, GL_UNSIGNED_BYTE, pdata);
  // Convert RGB to BGR (only needed when reading with GL_RGB):
  //for(int i = 0 ; i < width * height * 3 ; i += 3)
  //{
  // unsigned char tmpRGB;
  // tmpRGB     = pdata[i];
  // pdata[i]   = pdata[i+2];
  // pdata[i+2] = tmpRGB;
  //}
  // Pad each row to a 4-byte boundary (only needed if the rows are not already packed to 4 bytes):
  //int nAlignWidth = (width*24+31)/32;
  //unsigned char *pDataAfterAlign = (unsigned char *) malloc(4 * nAlignWidth * height);
  //memset(pDataAfterAlign, 0, 4 * nAlignWidth * height);
  //int nZero = 4*nAlignWidth - 3*width;
  //for(int j=0; j<height; j++)
  //{
  //  for(int i=0; i<width*3; i++)
  //  {
  //     pDataAfterAlign[j*(nZero+3*width)+i] = pdata[3*width*j+i];
  //  }
  //}
  BITMAPFILEHEADER Header;
  BITMAPINFOHEADER HeaderInfo;
  Header.bfType = 0x4D42;   // 'BM'
  Header.bfReserved1 = 0;
  Header.bfReserved2 = 0;
  Header.bfOffBits = (DWORD)(sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER)) ;
  Header.bfSize =(DWORD)(sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER) + nAlignWidth* height * 4);
  HeaderInfo.biSize = sizeof(BITMAPINFOHEADER);
  HeaderInfo.biWidth = width;
  HeaderInfo.biHeight = height;
  HeaderInfo.biPlanes = 1;
  HeaderInfo.biBitCount = 24;
  HeaderInfo.biCompression = 0;
  HeaderInfo.biSizeImage = 4 * nAlignWidth * height;
  HeaderInfo.biXPelsPerMeter = 0;
  HeaderInfo.biYPelsPerMeter = 0;
  HeaderInfo.biClrUsed = 0;
  HeaderInfo.biClrImportant = 0; 
  FILE *pfile;
  if(!(pfile = fopen(lpFileName, "wb+")))
  {
    AfxMessageBox("Failed to save the image!");
    delete[] pdata;   // avoid leaking the pixel buffer on failure
    return FALSE;
  }
  fwrite(&Header, 1, sizeof(BITMAPFILEHEADER), pfile);
  fwrite(&HeaderInfo, 1, sizeof(BITMAPINFOHEADER), pfile);
  fwrite(pdata, 1, HeaderInfo.biSizeImage, pfile);
  fclose(pfile);
  delete[] pdata;
  return TRUE;
}
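The commented-out alignment block relies on the standard BMP stride rule: every 24-bit row is rounded up to a whole number of DWORDs. The small stand-alone sketch below (not from the original post; the widths are arbitrary) makes the padding explicit and shows why the buffer is allocated as nAlignWidth * height * 4:

#include <cstdio>

int main()
{
  // A 24-bit BMP row must occupy a multiple of 4 bytes.
  const int widths[] = { 1, 2, 3, 4, 640, 641 };
  for (int width : widths)
  {
    int nAlignWidth = (width * 24 + 31) / 32;  // row size in DWORDs
    int rowBytes    = nAlignWidth * 4;         // padded row size in bytes
    int padding     = rowBytes - width * 3;    // zero bytes appended to each row
    std::printf("width=%4d  rowBytes=%5d  padding=%d\n", width, rowBytes, padding);
  }
  return 0;
}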

Done. The core of the whole thing is a single function:

void glReadPixels(GLint x,
                  GLint y,
                  GLsizei width,
                  GLsizei height,
                  GLenum format,
                  GLenum type,
                  GLvoid* data);
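Putting the pieces together, a read-back that needs neither the channel swap nor the manual padding could look like the sketch below. It only restates the assumptions already made in this post (a current OpenGL context and a driver that exposes GL_BGR_EXT); ReadBackBGR is an illustrative name, not an existing API:

#include <windows.h>
#include <GL/gl.h>
#include <vector>

// Reads the current viewport into a BGR buffer whose rows are padded to
// 4 bytes, i.e. laid out exactly as a 24-bit bottom-up BMP expects.
std::vector<unsigned char> ReadBackBGR(int& width, int& height)
{
  GLint viewport[4];
  glGetIntegerv(GL_VIEWPORT, viewport);
  width  = viewport[2];
  height = viewport[3];

  int rowBytes = ((width * 24 + 31) / 32) * 4;  // 4-byte aligned BMP stride
  std::vector<unsigned char> pixels(rowBytes * height);

  glPixelStorei(GL_PACK_ALIGNMENT, 4);          // packing used by glReadPixels (the default)
  glReadPixels(0, 0, width, height, GL_BGR_EXT, GL_UNSIGNED_BYTE, pixels.data());
  return pixels;
}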


