Installing the mencoder tool on Linux and converting y4m files to yuv files

This article walks through installing mencoder on Linux (CentOS and Ubuntu) using the yum and apt-get package managers, shows how to list the available audio/video encoders, and then focuses on converting a y4m file to yuv format.

Installing mencoder

On CentOS (depending on the release, this may require enabling a third-party repository such as RPM Fusion first):

yum install mencoder

On Ubuntu:

sudo apt-get update
sudo apt-get install mencoder
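
To verify the installation, check that the binary is on the PATH; running mencoder with no arguments also prints its version banner:

which mencoder
mencoder    # prints the MEncoder version banner and a usage summary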

Checking the available audio/video encoders

mencoder -ovc help    # list the available video encoders
Available codecs:
copy - frame copy, without re-encoding. Doesn't work with filters.
frameno - special audio-only file for 3-pass encoding, see DOCS.
raw - uncompressed video. Use fourcc option to set format explicitly.
nuv - nuppel video
lavc - libavcodec codecs - best quality!
vfw - VfW DLLs, read DOCS/HTML/en/encoding-guide.html.
qtvideo - QuickTime DLLs, currently only SVQ1/3 are supported.
xvid - XviD encoding
x264 - H.264 encoding
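
As a quick illustration of one of these encoders, a test clip could be compressed to H.264 with x264; the file names and bitrate below are purely illustrative:

mencoder input.y4m -ovc x264 -x264encopts bitrate=1000 -o output.avi    # bitrate in kbit/s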

mencoder -oac help    # list the available audio encoders
Available codecs:
copy - frame copy, without re-encoding (useful for AC3)
pcm - uncompressed PCM audio
mp3lame - cbr/abr/vbr MP3 using libmp3lame
lavc - FFmpeg audio encoder (MP2, AC3, …)
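
Similarly, an existing audio track could be re-encoded to MP3 while the video stream is copied unchanged; again, the file names and bitrate are illustrative:

mencoder input.avi -ovc copy -oac mp3lame -lameopts abr:br=128 -o output.avi    # 128 kbit/s average bitrate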

Conversion

mencoder file1.y4m -ovc raw -of rawvideo -vf format=i420 -o file2.yuv
# file1.y4m must already exist in the current directory
# file2.yuv does not need to be created in advance, but it must not share the input file's name; it will be generated in the same directory
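
Note that the y4m header records the frame size, frame rate, and pixel format, whereas a raw yuv file carries no metadata at all, so these parameters should be noted down before converting. To read the raw file back later (for example, to encode it), they must be supplied explicitly to mencoder's rawvideo demuxer; the 352x288 resolution and 25 fps below are illustrative values that must match the original clip:

mencoder file2.yuv -demuxer rawvideo -rawvideo w=352:h=288:fps=25:format=i420 -ovc x264 -o file3.avi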
