Android硬件H264保存文件

package com.interfaces.androidencode;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;

import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.os.StrictMode;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

/**
 * Reads raw YUV frames from a file on external storage, hardware-encodes
 * them to H.264 with {@link AvcEncoder}, streams each encoded frame over
 * UDP (playable in VLC) and appends it to out.h264 on external storage.
 *
 * Tested OK on oppo r7s (Android 4.4.4); failed on Redmi 3 (Android 5.1.1).
 *
 * @author Administrator
 */
@SuppressLint("NewApi")
public class MainActivity extends Activity{
	private final static String TAG = "h264test";
    DatagramSocket socket;          // UDP socket used to stream encoded frames
    InetAddress address;            // remote receiver (e.g. VLC) address

    AvcEncoder avcCodec;            // hardware H.264 encoder wrapper
    Camera m_camera;                // unused in this file-based test
    SurfaceView   m_prevewview;     // unused in this file-based test
    SurfaceHolder m_surfaceHolder;  // unused in this file-based test
	int width;
	int height;
    int framerate = 25;             // nominal frames per second
    int bitrate = 250000;           // target bitrate, bits per second
    boolean isyv12;                 // true if the source file is YV12 (needs swap to I420)
    byte[] buf = null;              // one raw YUV frame: width*height*3/2 bytes
    byte[] h264 = null;             // one encoded H.264 frame
    private String fileName = "";
    private static final String remoteIp = "192.168.1.101";
    private static final int remotePort = 5000;

    @Override
    protected void onCreate(Bundle savedInstanceState) {

        // StrictMode is enabled during development to surface accidental
        // disk/network work on the main thread and leaked closeables.
        StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder()
        .detectDiskReads()
        .detectDiskWrites()
        .detectAll()
        .penaltyLog()
        .build());
        StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder()
        .detectLeakedSqlLiteObjects()
        .detectLeakedClosableObjects()
        .penaltyLog()
        .penaltyDeath()
        .build());

        super.onCreate(savedInstanceState);

        // Pick the test clip: 1 = 480p, anything else = 720p.
        int test = 1;
        if(test == 1){
        	fileName = "/480p.yuv";
        	width = 640;
        	height = 480;
        	isyv12 = false;
        }else{
        	fileName = "/720p.yuv";
        	width = 1280;
        	height = 720;
        	isyv12 = false;
        }
        buf = new byte[width*height*3/2];
        h264 = new byte[width*height*3/2];
        avcCodec = new AvcEncoder(width,height,framerate,bitrate, isyv12);

        try {
            socket = new DatagramSocket();
            address = InetAddress.getByName(remoteIp);
        } catch (SocketException e) {
            e.printStackTrace();
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }

        // Fix: the original swallowed these exceptions and started the task
        // anyway, crashing later with an uncaught NullPointerException in
        // socket.send() on the worker thread. Abort instead.
        if (socket == null || address == null) {
            Log.e(TAG, "UDP socket setup failed, not starting encode task");
            return;
        }

        new H264FileTask().start();
    }

    /**
     * Reads from {@code fis} until {@code dst} is full or EOF is reached.
     * Fix: a single FileInputStream.read(byte[]) may return fewer bytes than
     * requested, which would feed a torn YUV frame to the encoder.
     *
     * @return the number of bytes actually read (dst.length for a full frame)
     */
    private static int readFully(FileInputStream fis, byte[] dst) throws IOException {
        int total = 0;
        while (total < dst.length) {
            int n = fis.read(dst, total, dst.length - total);
            if (n < 0) break;  // EOF
            total += n;
        }
        return total;
    }

	/** Worker thread: encode the YUV file frame by frame, send + save each frame. */
	class H264FileTask extends Thread{
		@Override
		public void run() {
			File infile = new File(Environment.getExternalStorageDirectory(), fileName);
			File outfile = new File(Environment.getExternalStorageDirectory(), "out.h264");
			if(!infile.exists()){
				Log.e(TAG," file is unexist.");
				return;
			}
			if(!infile.canRead()){
				Log.e(TAG," file is unread.");
				return;
			}
			if (outfile.exists()){
				Log.v(TAG,"outfile exists");
				outfile.delete();
			}
			// try-with-resources so both files are closed even on error
			// (the original leaked them whenever an exception was thrown).
			try (RandomAccessFile raf = new RandomAccessFile(outfile, "rw");
			     FileInputStream fis = new FileInputStream(infile)) {
				// Only encode complete frames; a trailing partial frame is dropped.
				while (readFully(fis, buf) == buf.length){
					int ret = avcCodec.h264Encoder(buf, h264);
					Log.v(TAG, "ret = " + ret);
					if(ret > 0){
						/*
						 * [vlc] playback:
						 * 1. Tools -> Preferences -> Show settings: All ->
						 *    Input/Codecs -> Demuxers -> Demux module:
						 *    "H264 video demuxer"
						 * 2. Media -> Open Network Stream -> udp://@:5000
						 */
						DatagramPacket packet=new DatagramPacket(h264, ret, address, remotePort);
						socket.send(packet);
						Log.v(TAG, "send packet");
						Thread.sleep(350);  // crude pacing for the receiver
						raf.write(h264, 0, ret);
					}
				}
			} catch(IOException e){
				e.printStackTrace();
			} catch (InterruptedException e) {
				// Restore the interrupt flag so callers can observe it.
				Thread.currentThread().interrupt();
				e.printStackTrace();
			}
		}
	}
}
/**
 * Thin wrapper around {@link MediaCodec} that encodes raw YUV 4:2:0 frames
 * into an H.264 Annex-B byte stream, one synchronous call per frame.
 *
 * The codec's very first output buffer carries only the SPS/PPS config data;
 * it is cached in {@code m_info} and prepended to every key frame, because
 * this encoder emits key frames without repeating SPS/PPS.
 */
@SuppressLint("InlinedApi")
class AvcEncoder {
	private final static String TAG = "h264test";
    private MediaCodec mediaCodec;
    private int m_width;
	private int m_height;
	private byte[] m_info = null;   // cached SPS/PPS NALs (Annex-B, incl. start codes)
	private byte[] yuv420 = null;   // I420 conversion buffer (used only for YV12 input)
	private boolean isYV12 = false;

    /** Returns true for the YUV 4:2:0 input color formats this code can feed. */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    /**
     * Picks the first recognized color format the codec supports for the
     * given MIME type; returns 0 (an invalid format) if none matches.
     */
    private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        Log.e(TAG,"error format:" + codecInfo.getName() + " / " + mimeType);
        return 0;
    }

    /** Returns the first encoder advertising the given MIME type, or null. */
    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder())    continue;
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType))
                    return codecInfo;
            }
        }
        return null;
    }

    /**
     * Configures and starts an "video/avc" encoder.
     *
     * @param width     frame width in pixels
     * @param height    frame height in pixels
     * @param framerate nominal frame rate hint (fps)
     * @param bitrate   target bitrate in bits/s
     * @param YV12      true if input frames are YV12 and must be swapped to I420
     */
    public AvcEncoder(int width, int height, int framerate, int bitrate, boolean YV12) {
        String mime = "video/avc";
        m_width  = width;
		m_height = height;
		isYV12 = YV12;
		yuv420 = new byte[width*height*3/2];
        int colorFormat = selectColorFormat(selectCodec(mime), mime);
        mediaCodec = MediaCodec.createEncoderByType(mime);
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(mime, width, height);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);  // key frame every 5 s

        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    /** Stops and releases the codec; safe to call once when done encoding. */
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    /**
     * Encodes one raw frame synchronously.
     *
     * @param input  raw frame, width*height*3/2 bytes (I420, or YV12 when the
     *               encoder was constructed with YV12 == true)
     * @param output receives the Annex-B encoded frame; must be large enough
     *               to also hold the prepended SPS/PPS on key frames
     * @return bytes written to output; 0 if the codec produced no output for
     *         this frame; -1 if the first output buffer was not the expected
     *         SPS/PPS config data
     */
    public int h264Encoder(byte[] input, byte[] output){
        int pos = 0;
        // Fix: do not alias yuv420 to the caller's buffer; the original did
        // (yuv420 = input) and later reused yuv420 as scratch, clobbering
        // the caller's input data on key frames.
        byte[] frame;
        if(isYV12){
        	swapYV12toI420(input, yuv420, m_width, m_height);
        	frame = yuv420;
        }else{
        	frame = input;
        }
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);  // block until a buffer is free
            Log.v(TAG,"inputBufferIndex="+inputBufferIndex);

            if (inputBufferIndex >= 0){
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(frame);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, frame.length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo,0);
            Log.v(TAG,"outputBufferIndex="+outputBufferIndex);

            // Drain everything the codec has ready; a frame may span buffers.
            while (outputBufferIndex >= 0){
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);

                if(m_info != null){
                    // Normal encoded data: append to the caller's buffer.
                    System.arraycopy(outData, 0,  output, pos, outData.length);
                    pos += outData.length;
                }else{
                    // The first output buffer must be the SPS/PPS config data
                    // (starts with the 00 00 00 01 start code); cache it for
                    // later key-frame insertion.
                    ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData);
                    if (spsPpsBuffer.getInt() == 0x00000001){
                        m_info = new byte[outData.length];
                        System.arraycopy(outData, 0, m_info, 0, outData.length);
                    }else {
                        // Fix: release the dequeued buffer before bailing out,
                        // otherwise the codec leaks an output buffer.
                        mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                        return -1;
                    }
                }
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }

            // Key frames (NAL type 5 -> byte 0x65 after the 4-byte start code)
            // come without SPS/PPS, so prepend the cached config data.
            // Fix: guard on pos > 4 so stale bytes left in `output` from a
            // previous call cannot trigger a bogus insertion when this call
            // produced no output, and on m_info != null to avoid an NPE.
            if(pos > 4 && m_info != null && output[4] == 0x65){
            	Log.v(TAG,"key frame");
            	Log.v(TAG,"m_info:"+bytesToHexStringPrint(m_info));
            	// e.g. m_info: 00 00 00 01 67 ... 00 00 00 01 68 ...
                // System.arraycopy is specified to handle overlapping ranges
                // as if through a temporary buffer, so shift in place instead
                // of borrowing yuv420 as scratch.
                System.arraycopy(output, 0,  output, m_info.length, pos);
                System.arraycopy(m_info, 0,  output, 0, m_info.length);
                pos += m_info.length;
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
        return pos;
    }

    /**
     * Converts YV12 to I420 by swapping the chroma planes (Y V U -> Y U V).
     * Plane sizes: Y = width*height, each chroma plane = width*height/4.
     */
    private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height) {
    	System.arraycopy(yv12bytes, 0, i420bytes, 0,width*height);
    	System.arraycopy(yv12bytes, width*height+width*height/4, i420bytes, width*height,width*height/4);
    	System.arraycopy(yv12bytes, width*height, i420bytes, width*height+width*height/4,width*height/4);
    }

    /** Formats a byte array as space-separated upper-case hex, e.g. "00 01 FF ". */
    public static String bytesToHexStringPrint(byte[] bArray){
		if(bArray == null || bArray.length == 0){
			Log.d(TAG, "bArray is null");
			return null;
		}
		// StringBuilder (no synchronization needed) sized for "XX " per byte.
		StringBuilder sb = new StringBuilder(bArray.length * 3);
		String sTemp;
		for (int i = 0; i < bArray.length; i++){
			sTemp = Integer.toHexString(0xFF & bArray[i]);
			if (sTemp.length() < 2)
				sb.append(0);
			sb.append(sTemp.toUpperCase()+" ");
		}
		return sb.toString();
	}
}

权限:

    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> 
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS"/>  
    <uses-permission android:name="android.permission.INTERNET"/> 
    <uses-permission android:name="android.permission.RECORD_VIDEO"/> 
    <uses-permission android:name="android.permission.RECORD_AUDIO"/> 
	<uses-permission android:name="android.permission.CAMERA" /> 
    <uses-feature android:name="android.hardware.camera" /> 
    <uses-feature android:name="android.hardware.camera.autofocus" />  
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-feature android:name="android.hardware.wifi" android:required="true" />
布局:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:orientation="horizontal"
    tools:context=".MainActivity" >

    <SurfaceView
         android:id="@+id/SurfaceViewPlay"
        android:layout_width="fill_parent"
        android:layout_height="fill_parent"
         />

</RelativeLayout>

  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 2
    评论
要在 Android 上使用 FFmpeg 将 H.264 和 AAC 流保存为 TS 文件,你需要将 FFmpeg 编译为 Android 平台上的静态库,并在你的应用程序中使用它。 以下是一个使用 FFmpeg 在 Android 上将 H.264 和 AAC 流保存为 TS 文件的示例代码: ```java import java.io.IOException; public class FFmpegConverter { static { System.loadLibrary("ffmpeg"); } public native int convertToTS(String inputFilePath, String outputFilePath); public void convert(String inputFilePath, String outputFilePath) throws IOException { int result = convertToTS(inputFilePath, outputFilePath); if (result != 0) { throw new IOException("Failed to convert to TS: " + result); } } } ``` 在上面的代码中,`convertToTS()` 方法使用 JNI 调用 FFmpeg 库来执行转换操作。以下是 `convertToTS()` 方法的实现: ```c #include <jni.h> #include <string.h> #include <stdlib.h> #include <stdio.h> #include "libavutil/opt.h" #include "libavutil/samplefmt.h" #include "libavformat/avformat.h" #include "libswresample/swresample.h" #include "libavcodec/avcodec.h" JNIEXPORT jint JNICALL Java_com_example_ffmpegconverter_FFmpegConverter_convertToTS(JNIEnv *env, jobject thiz, jstring input_file_path, jstring output_file_path) { const char *input_path = (*env)->GetStringUTFChars(env, input_file_path, 0); const char *output_path = (*env)->GetStringUTFChars(env, output_file_path, 0); AVFormatContext *input_format_context = NULL; int ret = avformat_open_input(&input_format_context, input_path, NULL, NULL); if (ret < 0) { goto end; } ret = avformat_find_stream_info(input_format_context, NULL); if (ret < 0) { goto end; } AVFormatContext *output_format_context = NULL; ret = avformat_alloc_output_context2(&output_format_context, NULL, "mpegts", output_path); if (ret < 0) { goto end; } for (int i = 0; i < input_format_context->nb_streams; i++) { AVStream *input_stream = input_format_context->streams[i]; AVCodecParameters *input_codec_parameters = input_stream->codecpar; AVCodec *input_codec = avcodec_find_decoder(input_codec_parameters->codec_id); if (!input_codec) { goto end; } AVStream *output_stream = avformat_new_stream(output_format_context, input_codec); if 
(!output_stream) { goto end; } ret = avcodec_parameters_copy(output_stream->codecpar, input_codec_parameters); if (ret < 0) { goto end; } output_stream->codecpar->codec_tag = 0; if (output_format_context->oformat->flags & AVFMT_GLOBALHEADER) { output_stream->codecpar->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; } } ret = avio_open(&output_format_context->pb, output_path, AVIO_FLAG_WRITE); if (ret < 0) { goto end; } ret = avformat_write_header(output_format_context, NULL); if (ret < 0) { goto end; } AVPacket packet; av_init_packet(&packet); while (av_read_frame(input_format_context, &packet) == 0) { AVStream *input_stream = input_format_context->streams[packet.stream_index]; AVStream *output_stream = output_format_context->streams[packet.stream_index]; packet.pts = av_rescale_q_rnd(packet.pts, input_stream->time_base, output_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX); packet.dts = av_rescale_q_rnd(packet.dts, input_stream->time_base, output_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX); packet.duration = av_rescale_q(packet.duration, input_stream->time_base, output_stream->time_base); packet.pos = -1; ret = av_interleaved_write_frame(output_format_context, &packet); if (ret < 0) { goto end; } av_packet_unref(&packet); } ret = av_write_trailer(output_format_context); if (ret < 0) { goto end; } end: if (output_format_context) { avio_closep(&output_format_context->pb); avformat_free_context(output_format_context); } if (input_format_context) { avformat_close_input(&input_format_context); } (*env)->ReleaseStringUTFChars(env, input_file_path, input_path); (*env)->ReleaseStringUTFChars(env, output_file_path, output_path); return ret; } ``` 在 `convertToTS()` 方法中,我们首先打开输入文件并读取流信息,然后创建一个输出格式上下文和输出流,并将输入流的编解码参数复制到输出流中。然后我们打开输出文件并写入头部信息,接着从输入文件中读取数据包,并将其转换为输出流的时间基。最后,我们将包写入输出文件,并在完成后写入尾部信息。 编译 FFmpeg 静态库的详细步骤超出了本回答的范围,但你可以参考一些在线资源,例如 FFmpeg 官方文档和博客,以帮助你完成此任务。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值