x264 + mp4v2 WeChat-Style Short Video (Initial Version)

I'm about to leave my job and don't have much to do (well, I actually do, but my personal pace is faster than the company's), and since I've recently been learning FFmpeg and x264, I had the sudden idea of writing a demo similar to WeChat's short video feature. This is my first blog post, so please bear with me if it isn't well written.

Enough talk; let's get started.

First, compiling x264 and FFmpeg (environment: VMware Workstation + Ubuntu 16.04 LTS). I compiled and optimized FFmpeg by following the tutorials "Ubuntu14.04编译Android FFmpeg详细教程" and "[转载]ffmpeg neon优化必看!!android下编译ffmpeg", and later found a guide on building x264 and integrating it into FFmpeg. Below are the build shell scripts (note: the NDK paths need to be adjusted for your own setup; they are for reference only). The FFmpeg build script:

cd ../x264
./build_x264.sh
echo '+++++++++++++++++++++++++++
x264 compile finished
++++++++++++++++++++++++++++++
'
cd ../faac
./build.sh
echo '+++++++++++++++++++++++++++
faac compile finished
++++++++++++++++++++++++++++++
'
#make clean
cd ../ffmpeg
cd ./ffmpeg_latest
export NDK=/mnt/android-ndk-r9d
export PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt
export PLATFORM=$NDK/platforms/android-8/arch-arm
export PREFIX=../FFandroid/
export EXTRA_CFLAGS="-I../../x264/lib/include"
export EXTRA_LDFLAGS="-L../../x264/lib/lib"
export EXTRA_LIB="$NDK/sources/NFFmpeg/x264/lib/lib"
build_one(){
./configure --target-os=linux --prefix=$PREFIX \
--enable-cross-compile \
--enable-runtime-cpudetect \
--enable-asm \
--arch=arm \
--cc=$PREBUILT/linux-x86_64/bin/arm-linux-androideabi-gcc \
--cross-prefix=$PREBUILT/linux-x86_64/bin/arm-linux-androideabi- \
--extra-cflags="-fPIC -DANDROID -mfpu=neon -D__thumb__ -mthumb -Wfatal-errors -Wno-deprecated -mfloat-abi=softfp -marm -march=armv7-a $EXTRA_CFLAGS" \
--extra-ldflags=$EXTRA_LDFLAGS \
--extra-libs=-lfaac \
--disable-stripping \
--nm=$PREBUILT/linux-x86_64/bin/arm-linux-androideabi-nm \
--sysroot=$PLATFORM \
--disable-everything \
--disable-vda \
--disable-iconv \
--disable-encoders \
--enable-libx264 \
--enable-encoder=libx264 \
--enable-libfaac \
--disable-muxers \
--enable-muxer=mov \
--enable-muxer=ipod \
--enable-muxer=psp \
--enable-muxer=mp4 \
--enable-muxer=avi \
--disable-decoders \
--enable-decoder=aac \
--enable-decoder=aac_latm \
--enable-decoder=h264 \
--enable-decoder=mpeg4 \
--disable-demuxers \
--enable-demuxer=h264 \
--enable-demuxer=mov \
--disable-parsers \
--enable-parser=aac \
--enable-parser=ac3 \
--enable-parser=h264 \
--disable-protocols \
--enable-protocol=file \
--enable-protocol=rtmp \
--disable-bsfs \
--enable-bsf=aac_adtstoasc \
--enable-bsf=h264_mp4toannexb \
--disable-indevs \
--enable-gpl --enable-neon --enable-pthreads --enable-static --disable-shared --enable-nonfree --enable-version3 --enable-small \
--enable-zlib --disable-ffprobe --disable-ffplay --disable-ffmpeg --disable-ffserver --disable-debug 
}
build_one

make clean
# parallel build
make -j8
make install

$PREBUILT/linux-x86_64/bin/arm-linux-androideabi-ld -rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib -L$PREFIX/lib -soname libffmpeg.so -shared -nostdlib -Bsymbolic --whole-archive --no-undefined -o $PREFIX/libffmpeg.so $EXTRA_LIB/libx264.a libavcodec/libavcodec.a libavfilter/libavfilter.a libswresample/libswresample.a libavformat/libavformat.a libavutil/libavutil.a libswscale/libswscale.a libpostproc/libpostproc.a libavdevice/libavdevice.a -lc -lm -lz -ldl -llog --dynamic-linker=/system/bin/linker $PREBUILT/linux-x86_64/lib/gcc/arm-linux-androideabi/4.8/libgcc.a
x264 build script:

cd x264
export NDK=/mnt/android-ndk-r9d
export TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt/linux-x86_64
export PLATFORM=$NDK/platforms/android-9/arch-arm
export PREFIX=../lib

./configure \
  --prefix=$PREFIX \
  --enable-static \
  --enable-shared \
  --enable-pic \
  --disable-cli \
  --host=arm-linux \
  --cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
  --extra-cflags="-mfpu=neon -D__thumb__ -mthumb -Wfatal-errors -Wno-deprecated -mfloat-abi=softfp -marm -march=armv7-a" \
  --sysroot=$PLATFORM

  
make -j8
make install

cd ..
For the directory layout, refer to the blog post "Android移植带有libx264实现的ffmpeg". With that, x264 is integrated into FFmpeg (I won't go into the details of the build process; it was painful). The resulting files are shown in the figure below:

Copy the compiled files out, create a new Android project, add a jni directory to it, then download the mp4v2 source code and copy it into the jni directory. The project structure is shown below (because of build problems, I placed the FFmpeg and x264 files inside the mp4v2 directory):

Here is the Android.mk file:

LOCAL_PATH:=$(call my-dir)

# FFmpeg library
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpegX264
LOCAL_SRC_FILES := prebuilt/libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)

# Program
include $(CLEAR_VARS)
LOCAL_C_INCLUDES += $(LOCAL_PATH)/ffmpeg $(LOCAL_PATH)/x264

LOCAL_MODULE:= ffwithx264
LOCAL_SRC_FILES:= Nsimplest_ffmpeg_encoder.c\
    src/3gp.cpp src/atom_ac3.cpp \
    src/atom_amr.cpp src/atom_avc1.cpp src/atom_avcC.cpp \
    src/atom_chpl.cpp src/atom_colr.cpp src/atom_d263.cpp \
    src/atom_dac3.cpp src/atom_damr.cpp src/atom_dref.cpp \
    src/atom_elst.cpp src/atom_enca.cpp src/atom_encv.cpp \
    src/atom_free.cpp src/atom_ftyp.cpp src/atom_ftab.cpp \
    src/atom_gmin.cpp src/atom_hdlr.cpp src/atom_hinf.cpp \
    src/atom_hnti.cpp src/atom_href.cpp src/atom_mdat.cpp \
    src/atom_mdhd.cpp src/atom_meta.cpp src/atom_mp4s.cpp \
    src/atom_mp4v.cpp src/atom_mvhd.cpp src/atom_nmhd.cpp \
    src/atom_ohdr.cpp src/atom_pasp.cpp src/atom_root.cpp \
    src/atom_rtp.cpp src/atom_s263.cpp src/atom_sdp.cpp \
    src/atom_sdtp.cpp src/atom_smi.cpp src/atom_sound.cpp \
    src/atom_standard.cpp src/atom_stbl.cpp src/atom_stdp.cpp \
    src/atom_stsc.cpp src/atom_stsd.cpp src/atom_stsz.cpp \
    src/atom_stz2.cpp src/atom_text.cpp src/atom_tfhd.cpp \
    src/atom_tkhd.cpp src/atom_treftype.cpp src/atom_trun.cpp \
    src/atom_tx3g.cpp src/atom_udta.cpp src/atom_url.cpp \
    src/atom_urn.cpp src/atom_uuid.cpp src/atom_video.cpp \
    src/atom_vmhd.cpp src/atoms.h src/cmeta.cpp \
    src/descriptors.cpp src/descriptors.h src/exception.cpp \
    src/exception.h src/enum.h src/enum.tcc src/impl.h \
    src/isma.cpp src/log.h src/log.cpp src/mp4.cpp src/mp4array.h \
    src/mp4atom.cpp src/mp4atom.h src/mp4container.cpp \
    src/mp4container.h src/mp4descriptor.cpp src/mp4descriptor.h \
    src/mp4file.cpp src/mp4file.h src/mp4file_io.cpp \
    src/mp4info.cpp src/mp4property.cpp src/mp4property.h \
    src/mp4track.cpp src/mp4track.h src/mp4util.cpp src/mp4util.h \
    src/ocidescriptors.cpp src/ocidescriptors.h src/odcommands.cpp \
    src/odcommands.h src/qosqualifiers.cpp src/qosqualifiers.h \
    src/rtphint.cpp src/rtphint.h src/src.h src/text.cpp \
    src/text.h src/util.h src/bmff/bmff.h src/bmff/impl.h \
    src/bmff/typebmff.cpp src/bmff/typebmff.h \
    src/itmf/CoverArtBox.cpp src/itmf/CoverArtBox.h \
    src/itmf/Tags.cpp src/itmf/Tags.h src/itmf/generic.cpp \
    src/itmf/generic.h src/itmf/impl.h src/itmf/itmf.h \
    src/itmf/type.cpp src/itmf/type.h \
    src/qtff/ColorParameterBox.cpp src/qtff/ColorParameterBox.h \
    src/qtff/PictureAspectRatioBox.cpp \
    src/qtff/PictureAspectRatioBox.h src/qtff/coding.cpp \
    src/qtff/coding.h src/qtff/impl.h src/qtff/qtff.h \
    libplatform/endian.h libplatform/impl.h \
    libplatform/io/File.cpp libplatform/io/File.h \
    libplatform/io/FileSystem.cpp libplatform/io/FileSystem.h \
    libplatform/number/random.h libplatform/platform.h \
    libplatform/platform_base.h libplatform/platform_posix.h \
    libplatform/process/process.h \
    libplatform/prog/option.cpp libplatform/prog/option.h \
    libplatform/sys/error.cpp libplatform/sys/error.h \
    libplatform/time/time.cpp libplatform/time/time.h \
    libplatform/warning.h libplatform/io/File_posix.cpp \
    libplatform/io/FileSystem_posix.cpp \
    libplatform/number/random_posix.cpp \
    libplatform/process/process_posix.cpp \
    libplatform/time/time_posix.cpp \
    libutil/Database.cpp \
    libutil/Database.h libutil/Timecode.cpp libutil/Timecode.h \
    libutil/TrackModifier.cpp libutil/TrackModifier.h \
    libutil/Utility.cpp libutil/Utility.h libutil/crc.cpp \
    libutil/crc.h libutil/impl.h libutil/other.cpp libutil/other.h \
    libutil/util.h

LOCAL_ARM_MODE := arm  
LOCAL_CFLAGS := -fexceptions  

LOCAL_LDLIBS := -llog -lz
LOCAL_CXXFLAGS :=-fexceptions -Wno-write-strings
#LOCAL_MODULE:= ffwithx264
LOCAL_CPPFLAGS := -O2 -fexceptions -DHAVE_SOCKLEN_T -DHAVE_STRUCT_IOVEC
LOCAL_MODULE_TAGS := optional
LOCAL_SHARED_LIBRARIES := ffmpegX264

include $(BUILD_SHARED_LIBRARY)
At this point, we can start writing code.

Here is the JNI code first:

/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <x264/x264.h>
#include "mp4v2/mp4v2.h"

#include "ffmpeg/libavcodec/avcodec.h"
#include "ffmpeg/libavformat/avformat.h"
#include "ffmpeg/libavutil/avutil.h"
#include "ffmpeg/libavutil/frame.h"
#include "ffmpeg/libavutil/samplefmt.h"

#ifdef ANDROID
#include <jni.h>
#include <android/log.h>
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  "(^_^)", format, ##__VA_ARGS__)
#else
#define LOGE(format, ...)  printf("(>_<) " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...)  printf("(^_^) " format "\n", ##__VA_ARGS__)
#endif
#define DATA_MAX 3000000
#define H264_MTU 1024

long f_handle;
long trackID;
unsigned char sps_pps_640[17] = { 0x67, 0x42, 0x40, 0x1F, 0x96, 0x54, 0x05, 0x01, 0xED, 0x00, 0xF3, 0x9E, 0xA0, 0x68, 0xCE, 0x38, 0x80 }; //hard-coded SPS and PPS (13 + 4 bytes)
unsigned char sps_pps[17];
unsigned char ubuffer[2] = { 0x11, 0x88 };

AVCodec *pCodec;
AVCodecContext *pCodecCtx = NULL;
AVPacket pkt;
AVFrame *pFrame;
char *padts;

typedef struct {
	x264_param_t * param;
	x264_t *handle;
	x264_picture_t * picture;
	x264_nal_t *nal;
	int m_width;
	int m_height;
	int m_fps;
} Encoder;

//====================================================




jlong initEncoder(int width, int height, int fps) {
	Encoder * en = (Encoder *) malloc(sizeof(Encoder));
	en->param = (x264_param_t *) malloc(sizeof(x264_param_t));
	en->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));

	x264_param_default_preset(en->param, "veryfast", "zerolatency");
	x264_param_apply_profile(en->param, "baseline");

	en->param->i_log_level = X264_LOG_NONE;
	en->m_width = en->param->i_width = width; //set frame width
	en->m_height = en->param->i_height = height; //set frame height
	en->param->i_threads = X264_SYNC_LOOKAHEAD_AUTO;
	en->param->rc.i_lookahead = 0;
	en->m_fps = en->param->i_fps_num = fps;
	en->param->i_fps_den = 1;

	en->param->i_frame_total = 0;
	en->param->rc.i_bitrate = 1024;
	en->param->i_timebase_den = en->param->i_fps_num;
	en->param->i_timebase_num = en->param->i_fps_den;
	en->param->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;
	en->param->rc.i_rc_method = X264_RC_CQP;
	en->param->analyse.i_me_method = X264_ME_DIA;
	en->param->analyse.i_subpel_refine = 2;
	en->param->rc.i_qp_min = 10;
	en->param->rc.i_qp_max = 51;
	en->param->i_keyint_max=fps;
	en->param->i_keyint_min=fps;
	en->param->analyse.intra = X264_ANALYSE_I8x8 | X264_ANALYSE_PSUB16x16;

	if ((en->handle = x264_encoder_open(en->param)) == 0) {
		return 0;
	}
	/* Create a new pic */
	x264_picture_alloc(en->picture, X264_CSP_I420, en->param->i_width, en->param->i_height);
	return (jlong) en;
}

void CloseEncoder(jlong handle) {
	Encoder * en = (Encoder *) handle;
	if (en->picture) {
		x264_picture_clean(en->picture);
		free(en->picture);
		en->picture = 0;
	}
	if (en->param) {
		free(en->param);
		en->param = 0;
	}
	if (en->handle) {
		x264_encoder_close(en->handle);
	}
	free(en);
}

int encodeBuffer(JNIEnv*env, jlong handle, jbyteArray in, jbyteArray out) {

	Encoder * en = (Encoder *) handle;
	x264_picture_t pic_out;

	int i_data = 0;
	int nNal = -1;
	int result = 0;
	int i = 0, j = 0;
	int nPix = 0;

	jbyte * Buf = (jbyte*) (*env)->GetByteArrayElements(env, in, 0);
	jbyte * h264Buf = (jbyte*) (*env)->GetByteArrayElements(env, out, 0);

	int nPicSize = en->param->i_width * en->param->i_height;

	/* In X264_CSP_I420, plane[0] is Y, plane[1] is U and plane[2] is V.
	 * The u/v names below are swapped, but the loop still maps the camera's
	 * NV21 chroma bytes (interleaved V,U after the Y plane) onto the right planes. */
	jbyte * y = en->picture->img.plane[0];
	jbyte * v = en->picture->img.plane[1];
	jbyte * u = en->picture->img.plane[2];
	memcpy(y, Buf, nPicSize);
	for (i = 0; i < nPicSize / 4; i++) {
		*(u + i) = *(Buf + nPicSize + i * 2);     /* V sample -> plane[2] */
		*(v + i) = *(Buf + nPicSize + i * 2 + 1); /* U sample -> plane[1] */
	}
	en->picture->i_type = X264_TYPE_AUTO;

	if (x264_encoder_encode(en->handle, &(en->nal), &nNal, en->picture, &pic_out) < 0) {
		return -1;
	}
	jbyte * outCursor = h264Buf;
	for (i = 0; i < nNal; i++) {
		memcpy(outCursor, en->nal[i].p_payload, en->nal[i].i_payload);
		outCursor += en->nal[i].i_payload;
		result += en->nal[i].i_payload;
	}
	(*env)->ReleaseByteArrayElements(env, in, Buf, 0);
	/* release the original array pointer, not an advanced cursor */
	(*env)->ReleaseByteArrayElements(env, out, h264Buf, 0);
	return result;
}

//====================================================

jlong Java_com_wang_cameratest_MainActivity_EncoderInit(JNIEnv* env, jobject thiz, jint width, jint height, jint fps) {
	//f_handle=createMp4Handle(env, path);
	jlong result = initEncoder(width, height, fps);
	return result;
}

jint Java_com_wang_cameratest_MainActivity_EncoderEnd(JNIEnv* env, jobject thiz, jlong enc_handle) {
	CloseEncoder(enc_handle);
	return 0;
}
jint Java_com_wang_cameratest_MainActivity_EncodeBuffer(JNIEnv* env, jobject thiz, jlong enc_handle, jbyteArray in, jint insize,
		jbyteArray out) {
	int result = encodeBuffer(env, enc_handle, in, out);
	return result;
}

jlong Java_com_wang_cameratest_MainActivity_createMp4File(JNIEnv* env, jobject thiz, jstring fileName) {
	//initFFmpeg();
	const char* local_title = (*env)->GetStringUTFChars(env, fileName, 0);
	MP4FileHandle fileHandle = MP4Create(local_title, 0);
	(*env)->ReleaseStringUTFChars(env, fileName, local_title);
	if (fileHandle == MP4_INVALID_FILE_HANDLE) {
		LOGI("Wang createMp4Handle failed %lld ", fileHandle);
		return -1;
	}
	LOGI("Wang createMp4Handle  %lld ", fileHandle);
	return fileHandle;
}
jlong Java_com_wang_cameratest_MainActivity_addVideoTrack(JNIEnv* env, jobject thiz, jlong enc, jlong file_handle) {
	Encoder * en = (Encoder *) enc;
	LOGI("Wang addVideoTrack   %lld ", file_handle);
	memcpy(sps_pps, sps_pps_640, 17);
	MP4SetTimeScale(file_handle, 90000);
	MP4TrackId video = MP4AddH264VideoTrack(file_handle, 90000, 90000 / en->m_fps, en->m_width, en->m_height, sps_pps[1], sps_pps[2],
			sps_pps[3], 3);
	//MP4TrackId result = MP4AddH264VideoTrack(fhandle, 90000, 90000 / en->m_fps, en->m_width, en->m_height, 0x64, 0x00, 0x1f, 3);

	if (video == MP4_INVALID_TRACK_ID) {
		Java_com_wang_cameratest_MainActivity_closeMp4File(env, thiz, file_handle);
		return -2;
	}
	MP4AddH264SequenceParameterSet(file_handle, video, sps_pps, 13);
	MP4AddH264PictureParameterSet(file_handle, video, sps_pps + 13, 4);
	MP4SetVideoProfileLevel(file_handle, 0x0F);
	//MP4SetAudioProfileLevel(f_handle, 0x0F);
	return video;
}

jlong Java_com_wang_cameratest_MainActivity_addAudioTrack(JNIEnv* env, jobject thiz, jlong file_handle, jint timeScale) {
	LOGI("Wang addVideoTrack   %lld ", file_handle);
	//MP4SetTimeScale(f_handle, 90000);
	//MP4_MPEG4_AUDIO_TYPE MP4_MPEG2_AAC_LC_AUDIO_TYPE
	MP4TrackId audio = MP4AddAudioTrack(file_handle, timeScale, 1024, MP4_MPEG2_AAC_LC_AUDIO_TYPE);
	if (audio == MP4_INVALID_TRACK_ID) {
		Java_com_wang_cameratest_MainActivity_closeMp4File(env, thiz, file_handle);
		return -2;
	}
	/* use the global 2-byte AudioSpecificConfig defined at the top of the file */
	bool result = MP4SetTrackESConfiguration(file_handle, audio, ubuffer, 2);
	//LOGI("Wang MP4SetTrackESConfiguration result =   %d",result);
	MP4SetAudioProfileLevel(file_handle, 0x2);
	return audio;
}

void Java_com_wang_cameratest_MainActivity_writeH264Data(JNIEnv* env, jobject thiz, jlong file, jlong videoTrackId, jbyteArray data,
		jint size) {

	unsigned char* buf = (unsigned char *) (*env)->GetByteArrayElements(env, data, JNI_FALSE);
	/* Replace the 4-byte Annex-B start code at the front of the buffer with the
	 * NAL length, as the MP4 (AVCC) format requires. This assumes a single NAL
	 * per buffer; a more robust version would split the buffer on start codes. */
	int nalsize = size - 4;
	buf[0] = (nalsize & 0xff000000) >> 24;
	buf[1] = (nalsize & 0x00ff0000) >> 16;
	buf[2] = (nalsize & 0x0000ff00) >> 8;
	buf[3] = nalsize & 0x000000ff;

	bool b = MP4WriteSample(file, videoTrackId, buf, size, MP4_INVALID_DURATION, 0, true);
	(*env)->ReleaseByteArrayElements(env, data, (jbyte *) buf, 0);
}

void Java_com_wang_cameratest_MainActivity_writeAudioData(JNIEnv* env, jobject thiz, jlong fileHandle, jlong audioTrackId, jbyteArray data,
		jint size) {

	uint8_t *bufaudio = (uint8_t *) (*env)->GetByteArrayElements(env, data, JNI_FALSE);
	//MP4WriteSample(fileHandle, audioTrackId, &bufaudio[7], size - 7, MP4_INVALID_DURATION, 0, 1);
	//if(PCM2AAC(bufaudio,size)==1){
		MP4WriteSample(fileHandle, audioTrackId, &bufaudio[7], size-7, MP4_INVALID_DURATION, 0, 1);
	//}
	(*env)->ReleaseByteArrayElements(env, data, (jbyte *) bufaudio, 0);
}

void Java_com_wang_cameratest_MainActivity_closeMp4File(JNIEnv* env, jobject thiz, jlong fileHandler) {
	MP4Close(fileHandler, 0);
	//unInitFFmpeg();
}
For mp4v2 usage, I referred to the post "MP4V2 录制mp4(h264+aac)视频".

I don't have much of a C/C++ background, so I'm not sure how to explain things well, and I personally don't like writing comments. The code mostly comes from other people's work; I've forgotten the exact sources, so thank you to everyone who shared. I only modified parts of it on top of their code (I wonder how many people are in the same situation); please forgive any shortcomings!

initEncoder(int width, int height, int fps) mainly initializes x264 and sets its encoding parameters; please refer to the official API and the code above for the details. The mp4v2 API is comparatively simple, but I never managed to work out how the third parameter of MP4SetTrackESConfiguration(...) should be set, no matter how long I searched for a way to compute it.
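For what it's worth, here is a minimal sketch (my own addition, not from any of the original sources) of how that third parameter could be derived. It is the 2-byte MPEG-4 AudioSpecificConfig, and it can be built from the same field values used later in addADTStoPacket(): object type 2 for AAC-LC, sampling frequency index 4 for 44100 Hz, channel configuration 2 for stereo. The helper name make_aac_asc is mine:

#include <stdint.h>

/* AudioSpecificConfig layout (ISO/IEC 14496-3):
 *   5 bits audioObjectType | 4 bits samplingFrequencyIndex |
 *   4 bits channelConfiguration | 3 bits GASpecificConfig flags (all zero here) */
static void make_aac_asc(uint8_t asc[2], int objectType, int freqIdx, int chanCfg) {
	asc[0] = (uint8_t) ((objectType << 3) | (freqIdx >> 1));
	asc[1] = (uint8_t) (((freqIdx & 1) << 7) | (chanCfg << 3));
}

For AAC-LC (2), 44100 Hz (index 4), stereo (2) this gives {0x12, 0x10}, which could then be passed as MP4SetTrackESConfiguration(file_handle, audio, asc, 2). The hard-coded {0x11, 0x88} used in the JNI code above decodes to AAC-LC, 48000 Hz, mono, which does not match the 44.1 kHz stereo AudioRecord settings in the Java code, so this is probably worth double-checking.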
Next is the Java code:

package com.wang.cameratest;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.Timer;
import java.util.TimerTask;

import android.app.Activity;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.View.OnLongClickListener;
import android.view.View.OnTouchListener;
import android.view.Window;
import android.view.WindowManager;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.widget.Button;

@SuppressWarnings("deprecation")
public class MainActivity extends Activity implements Callback, PictureCallback, PreviewCallback, OnLongClickListener, OnTouchListener,
		AutoFocusCallback {
	private SurfaceView surfaceView;
	private SurfaceHolder surfaceHolder;
	private Camera camera;
	private Button button;
	private Parameters parameters;
	private boolean b_init = false;
	private long x264_encode_handler;
	private byte[] out_buffers;
	final int START=0x01;
	final int STOP=0x02;
	
	
	Timer timer=new Timer();
	class Task extends TimerTask{

		@Override
		public void run() {
			// TODO Auto-generated method stub
			b_init = false;
			//Toast.makeText(getApplicationContext(), "松开", Toast.LENGTH_SHORT).show();
			EncoderEnd(x264_encode_handler);
			stop();
			closeMp4File(fileHandle);
			fileHandle = 0;
			videoTrackId = 0;
			audioTrackId = 0;
			x264_encode_handler = 0;
			timer.cancel();
		}
		
	}

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		getWindow().setFormat(PixelFormat.TRANSLUCENT);
		this.requestWindowFeature(Window.FEATURE_NO_TITLE);
		getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
		setContentView(R.layout.activity_main);
		initView();
	}

	private void initView() {
		surfaceView = (SurfaceView) findViewById(R.id.surface);
		button = (Button) findViewById(R.id.button_record);
		button.setOnLongClickListener(this);
		button.setOnTouchListener(this);
		surfaceHolder = surfaceView.getHolder();
		surfaceHolder.addCallback(this);
		surfaceHolder.setKeepScreenOn(true);
		surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
		surfaceView.setOnTouchListener(new OnTouchListener() {

			@Override
			public boolean onTouch(View v, MotionEvent event) {
				// TODO Auto-generated method stub
				autoFocus();
				return false;
			}
		});
	}

	private void autoFocus() {
		if (camera != null) {
			camera.autoFocus(this);
		}
	}

	@Override
	public void onPictureTaken(byte[] arg0, Camera arg1) {
		// TODO Auto-generated method stub

	}

	@Override
	public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
		// TODO Auto-generated method stub
		try {
			parameters = camera.getParameters();
			parameters.setPreviewSize(640, 480);
			//parameters.setPictureFormat(PixelFormat.JPEG);
			parameters.setPreviewFormat(PixelFormat.YCbCr_420_SP);
			camera.setDisplayOrientation(90);
			camera.setParameters(parameters);
			camera.setPreviewDisplay(surfaceHolder);
			camera.setPreviewCallback(this);
			camera.startPreview();
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}

	}

	@Override
	public void surfaceCreated(SurfaceHolder arg0) {
		// TODO Auto-generated method stub
		if (camera == null) {
			camera = Camera.open();
		}
	}

	@Override
	public void surfaceDestroyed(SurfaceHolder arg0) {
		// TODO Auto-generated method stub
		if (camera != null) {
			camera.setPreviewCallback(null);
			camera.stopPreview();
			camera.release();
			camera = null;
		}
	}

	RandomAccessFile raf = null;
	int result;

	@Override
	public void onPreviewFrame(byte[] data, Camera camera) {
		if (b_init) {
			result = EncodeBuffer(x264_encode_handler, data, data.length, out_buffers);
			if (result > 0) {
				writeH264Data(fileHandle, videoTrackId, out_buffers, result);
			}
		}
	}

	@Override
	public boolean onLongClick(View v) {
		// TODO Auto-generated method stub
		if (!b_init) {
			String path = createVideo(System.currentTimeMillis() + ".mp4");
			Log.i("WangTest", "按钮按下  " + path);
			autoFocus();
			Size size = camera.getParameters().getPreviewSize();
			x264_encode_handler = EncoderInit(size.width, size.height, 20);
			out_buffers = new byte[size.width * size.height * 3 / 2];
			audioTrackId = addAudioTrack(fileHandle, KEY_SAMPLE_RATE);
			start();
			videoTrackId = addVideoTrack(x264_encode_handler, fileHandle);
			timer.schedule(new Task(), 5000);
			b_init = true;
		}
		return false;
	}

	private void startRecord() {
	}
	
	String MIME_TYPE = "audio/mp4a-latm";
	int KEY_CHANNEL_COUNT = 2;
	int KEY_SAMPLE_RATE = 44100;
	int KEY_BIT_RATE = 64000;
	int KEY_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC;
	int WAIT_TIME = 10000;

	int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
	int CHANNEL_MODE = AudioFormat.CHANNEL_IN_STEREO;

	int BUFFFER_SIZE = 2048;
	
	
	private Worker mWorker;
	private final String TAG = "AudioEncoder";
	private byte[] mFrameByte;


	public void start() {
		if (mWorker == null) {
			mWorker = new Worker();
			mWorker.setRunning(true);
			mWorker.start();
		}

	}

	public void stop() {
		if (mWorker != null) {
			mWorker.setRunning(false);
			mWorker = null;
		}
	}

	private String createAACFile() {
		String pathString = "";
		File file = new File(Environment.getExternalStorageDirectory(), System.currentTimeMillis() + ".aac");
		if (!file.exists()) {
			try {
				if (file.createNewFile()) {
					pathString = file.getAbsolutePath();
				}
			} catch (IOException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
		}
		Log.i("Wang", "wang :  " + pathString);
		return pathString;
	}

	private class Worker extends Thread {
		private final int mFrameSize = 2048;
		private byte[] mBuffer;
		private boolean isRunning = false;
		private MediaCodec mEncoder;
		private AudioRecord mRecord;
		MediaCodec.BufferInfo mBufferInfo;

		//FileChannel fileChannel;

		@Override
		public void run() {
			if (!prepare()) {
				Log.d(TAG, "音频编码器初始化失败");
				isRunning = false;
			}
//			try {
//				fileChannel = new FileOutputStream(createAACFile()).getChannel();
//			} catch (FileNotFoundException e) {
//				// TODO Auto-generated catch block
//				e.printStackTrace();
//			}
			while (isRunning) {
				int num = mRecord.read(mBuffer, 0, mFrameSize);
				encode(mBuffer);

			}
			release();
		}

		public void setRunning(boolean run) {
			isRunning = run;
		}

		/**
		 * Release the MediaCodec encoder and AudioRecord resources.
		 */
		private void release() {
			if (mEncoder != null) {
				mEncoder.stop();
				mEncoder.release();
			}
			if (mRecord != null) {
				mRecord.stop();
				mRecord.release();
				mRecord = null;
			}
//			try {
//				if (fileChannel.isOpen()) {
//					fileChannel.close();
//				}
//			} catch (IOException e) {
//				// TODO Auto-generated catch block
//				e.printStackTrace();
//			}

		}

		/**
		 * Configure the AAC encoder and the AudioRecord instance.
		 * 
		 * @return true if configuration succeeded, false otherwise
		 */
		private boolean prepare() {
			try {

				mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
				MediaFormat mediaFormat = MediaFormat.createAudioFormat(MIME_TYPE, KEY_SAMPLE_RATE, KEY_CHANNEL_COUNT);
				mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, KEY_BIT_RATE);
				mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, KEY_AAC_PROFILE);
				mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 8192);
				mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
				mEncoder.start();
			} catch (IOException e) {
				e.printStackTrace();
				return false;
			}
			mBuffer = new byte[mFrameSize];
			int minBufferSize = AudioRecord.getMinBufferSize(KEY_SAMPLE_RATE, CHANNEL_MODE, AUDIO_FORMAT);
			mRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, KEY_SAMPLE_RATE, CHANNEL_MODE, AUDIO_FORMAT, minBufferSize * 2);
			mRecord.startRecording();
			return true;
		}

		private void encode(byte[] data) {
			int inputBufferIndex = mEncoder.dequeueInputBuffer(-1);
			if (inputBufferIndex >= 0) {
				ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufferIndex);
				inputBuffer.clear();
				inputBuffer.put(data);
				inputBuffer.limit(data.length);
				// presentationTimeUs is expected in microseconds, not nanoseconds
				mEncoder.queueInputBuffer(inputBufferIndex, 0, data.length, System.nanoTime() / 1000, 0);
			}
			mBufferInfo = new MediaCodec.BufferInfo();
			int outputBufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, 0);
			while (outputBufferIndex >= 0) {
				// leave the first 7 bytes free for the ADTS header
				int outBitSize = mBufferInfo.size;
				int length = outBitSize + 7;
				ByteBuffer outputBuffer = mEncoder.getOutputBuffer(outputBufferIndex);

				outputBuffer.position(mBufferInfo.offset);
				outputBuffer.limit(mBufferInfo.offset + mBufferInfo.size);

				mFrameByte = new byte[length];
				addADTStoPacket(mFrameByte, length);

				outputBuffer.get(mFrameByte, 7, outBitSize);
				outputBuffer.position(mBufferInfo.offset);
//				try {
//					fileChannel.write(ByteBuffer.wrap(mFrameByte));
//				} catch (IOException e) {
//					e.printStackTrace();
//				}
				writeAudioData(fileHandle, audioTrackId, mFrameByte, mFrameByte.length);
				mEncoder.releaseOutputBuffer(outputBufferIndex, false);
				outputBufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, 0);
			}
		}

		/**
		 * Prepend an ADTS header to the raw AAC frame produced by the encoder.
		 * 
		 * @param packet
		 *            the first 7 bytes must be left free for the header, otherwise the data gets corrupted
		 * @param packetLen total length including the 7-byte header
		 */
		private void addADTStoPacket(byte[] packet, int packetLen) {
			int profile = 2; // AAC LC
			int freqIdx = 4; // 44.1KHz
			int chanCfg = 2; // CPE
			packet[0] = (byte) 0xFF;
			packet[1] = (byte) 0xF9;
			packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
			packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
			packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
			packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
			packet[6] = (byte) 0xFC;
		}
	}


	private String createVideo(String name) {
		String pName = "";
		try {
			File pFile = new File(Environment.getExternalStorageDirectory() + "/test/");
			if (!pFile.exists()) {
				pFile.mkdirs();
			}
			File nFile = new File(pFile, name);
			if (!nFile.exists()) {
				nFile.createNewFile();
				pName = nFile.getAbsolutePath();
				fileHandle = createMp4File(pName);

			}
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		return pName;
	}

	private long fileHandle;
	private long videoTrackId;
	private long audioTrackId;

	public native long EncoderInit(int width, int height, int fps);

	public native int EncodeBuffer(long handle, byte[] in_buffers, int size, byte[] out_buffers);

	public native int EncoderEnd(long pHandler);

	public native long createMp4File(String fileName);

	public native long addVideoTrack(long enc, long file);

	public native long addAudioTrack(long file, int timeScale);

	public native void writeH264Data(long fileHandler, long videoTrackId, byte[] data, int sz);

	public native void writeAudioData(long fileHandle, long trackId, byte[] data, int size);

	public native void closeMp4File(long fileHandler);

	@Override
	public boolean onTouch(View v, MotionEvent event) {
		// TODO Auto-generated method stub
		int action = event.getAction();
		switch (action) {
		case MotionEvent.ACTION_UP:
//			b_init = false;
//			Toast.makeText(getApplicationContext(), "松开", Toast.LENGTH_SHORT).show();
//			EncoderEnd(x264_encode_handler);
//			stop();
//			closeMp4File(fileHandle);
//			fileHandle = 0;
//			videoTrackId = 0;
//			audioTrackId = 0;
//			x264_encode_handler = 0;
			break;

		default:
			break;
		}
		return false;
	}

	static {
		System.loadLibrary("ffmpeg");
		System.loadLibrary("ffwithx264");
	}

	@Override
	public void onAutoFocus(boolean success, Camera camera) {
		// TODO Auto-generated method stub

	}

	@Override
	protected void onDestroy() {
		// TODO Auto-generated method stub
		super.onDestroy();
	}

}

The main job of the Java code is to grab camera preview frames and PCM audio, encode the PCM into AAC with MediaCodec (because mp4v2 cannot write raw PCM directly), and pass both down to the JNI layer. encodeBuffer(JNIEnv *env, jlong handle, jbyteArray in, jbyteArray out) converts the YUV data to H.264 and returns the encoded length, writeH264Data(long fileHandler, long videoTrackId, byte[] data, int sz) writes the H.264 video data into the MP4 file, and writeAudioData(long fileHandle, long trackId, byte[] data, int size) writes the AAC audio data into the MP4 file.

Known issues: audio/video synchronization is off, the picture skips frames, the colors are wrong, and the output files are large.
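For the sync issue, one direction worth trying (only a sketch under my own assumptions, not something I have verified in this demo) is to stop writing every video sample with MP4_INVALID_DURATION, which falls back to the fixed 90000/fps default set in addVideoTrack, and instead pass a per-frame duration derived from the real capture timestamps. The write_h264_sample helper and its timestampUs parameter below are my own additions, not part of the code above:

#include <stdint.h>
#include "mp4v2/mp4v2.h"

static int64_t last_pts_us = -1;

/* Write one H.264 sample whose duration is derived from the actual capture time.
 * As an approximation, the gap since the previous frame is used as this frame's
 * duration; the very first frame falls back to the track's default duration. */
void write_h264_sample(MP4FileHandle file, MP4TrackId track,
		uint8_t *buf, int size, int64_t timestampUs) {
	MP4Duration duration = MP4_INVALID_DURATION;
	if (last_pts_us >= 0) {
		/* convert microseconds to the 90 kHz track timescale */
		duration = (MP4Duration) ((timestampUs - last_pts_us) * 90000 / 1000000);
	}
	last_pts_us = timestampUs;
	MP4WriteSample(file, track, buf, size, duration, 0, true);
}

As for the large file size, the X264_RC_CQP rate control chosen in initEncoder is one likely contributor; switching to X264_RC_ABR together with a sensible rc.i_bitrate would be the first thing I would experiment with there.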

Finally, thanks to all the experts who shared their work so generously, and in memory of 雷神.

 
