android 使用X264编码视频
源码刚上传,可能还在审核中
源码下载地址
X264对应部分API介绍
初始化x264_param_t
// Allocate the x264 parameter struct (raw new kept as in source; freed elsewhere).
_x264_param = new x264_param_t;
/**
 * preset controls the encoding speed.
 * Options, from fastest to slowest: "ultrafast", "superfast", "veryfast",
 * "faster", "fast", "medium", "slow", "slower", "veryslow", "placebo".
 * "fast" is a reasonable default when there is no special requirement.
 *
 * tune adapts quality/detail trade-offs to the content or use case.
 * Options: "film", "animation", "grain", "stillimage", "psnr" (PSNR testing),
 * "ssim" (SSIM testing), "fastdecode", "zerolatency".
 */
x264_param_default_preset(_x264_param, "fast" , "zerolatency");// set preset and tune
码率的控制
if (bite == 0) {
bite = 1;
}
if (bite > 0 && bite <= 64) {
bitratelevel = BIT_LOW_LEVEL;
} else if (bite > 64 && bite <= 128) {
bitratelevel = BIT_MEDIUM_LEVEL;
} else if (bite > 128 && bite <= 256) {
bitratelevel = BIT_STANDARD_LEVEL;
} else if (bite > 256 && bite <= 384) {
bitratelevel = BIT_HIGH_LEVEL;
} else if (bite > 384 && bite <= 512) {
bitratelevel = BIT_HIGH_LEVEL;
} else {
bitratelevel = BIT_STANDARD_LEVEL;
}
/**
* f_rf_constant图像质量控制是实际质量,越大图像越花,越小越清晰。
*/
if (bitratelevel == BIT_LOW_LEVEL) {
_x264_param->rc.f_rf_constant = 32;
} else if (bitratelevel == BIT_MEDIUM_LEVEL) {
_x264_param->rc.f_rf_constant = 29;
} else if (bitratelevel == BIT_STANDARD_LEVEL) {
_x264_param->rc.f_rf_constant = 26;
} else if (bitratelevel == BIT_HIGH_LEVEL) {
_x264_param->rc.f_rf_constant = 24;
} else {
_x264_param->rc.f_rf_constant = 24;
}
码率控制模式、宽高、IDR帧间隔、帧率(X264_CSP_BGRA是RGBA编码)
// Repeat SPS/PPS in front of every keyframe so each IDR frame is
// self-contained (lets a stream client join mid-stream).
_x264_param->b_repeat_headers = 1;
// i_rc_method selects rate control: CQP (constant QP), CRF (constant
// quality), ABR (average bitrate). CRF is used together with
// rc.f_rf_constant set above.
_x264_param->rc.i_rc_method = X264_RC_CRF;
_x264_param->i_width = width;
_x264_param->i_height = height;
_x264_param->i_frame_total = 0; /* total number of frames to encode, 0 = unknown/default */
_x264_param->i_keyint_max = 3; /* maximum interval between IDR keyframes */
_x264_param->i_fps_den = 1; // frame rate denominator
_x264_param->i_fps_num = fps; // frame rate numerator
// Timebase mirrors the frame rate so one pts tick equals one frame.
_x264_param->i_timebase_den = _x264_param->i_fps_num;
_x264_param->i_timebase_num = _x264_param->i_fps_den;
_x264_param->i_cqm_preset = X264_CQM_FLAT; /* custom quantization matrix (CQM) mode: flat */
参考帧和B帧设置、编码线程设置、采用什么格式编码
// Cache the configured fps and dimensions in members for use by startEncoder().
xfps= fps;
xheight = height;
xwidth = width;
_x264_param->analyse.i_me_method = X264_ME_HEX; /* motion estimation method (X264_ME_*) */
_x264_param->analyse.i_subpel_refine = 2; /* subpixel motion estimation quality */
_x264_param->i_frame_reference = 1; /* maximum number of reference frames */
_x264_param->analyse.b_mixed_references = 0;/* allow each MB partition in P-frames to use its own reference */
/* Trellis quantization: finds a suitable quantizer for each 8x8 block; requires CABAC.
   0: off (default), 1: only on the final encode, 2: always on. */
_x264_param->analyse.i_trellis = 0;
_x264_param->b_sliced_threads = 0;
_x264_param->i_threads = 4;/* encode multiple frames in parallel */
// With i_threads = N: if b_sliced_threads=1 slices are encoded in parallel
// (no extra latency); if b_sliced_threads=0 whole frames are encoded in
// parallel (adds latency).
_x264_param->analyse.b_transform_8x8 = 0;/* inter partitions: 8x8 DCT disabled */
_x264_param->b_cabac = 0;
_x264_param->b_deblocking_filter =1;/* enable the in-loop deblocking filter */
_x264_param->psz_cqm_file = NULL;
_x264_param->analyse.i_weighted_pred = X264_WEIGHTP_NONE;
_x264_param->rc.i_lookahead = 10;
_x264_param->i_bframe = 0;/* number of B-frames between reference frames */
#if CODEC_X264_BGRA
_x264_param->i_csp = X264_CSP_BGRA; /* colorspace of the input bitstream pictures */
#else
// NOTE(review): no i_csp assigned on this path — presumably relies on the
// x264 default (I420); confirm against the x264 version in use.
#endif
_x264_param参数初始化结束后直接打开编码器
// Open the encoder with the parameters configured above.
// NOTE(review): the return value is not checked here — x264_encoder_open()
// returns NULL on failure; startEncoder() does guard against a NULL encoder.
_x264_encoder = x264_encoder_open( _x264_param );
创建相关的编码结构和编码后输出结构体(RGBA和YUV420p两种格式)
// Input/output picture structures: _in_pic carries the raw frame into the
// encoder, _out_pic receives per-frame metadata (e.g. b_keyframe) back.
_in_pic = new x264_picture_t;
_out_pic = new x264_picture_t;
x264_picture_init(_out_pic);
#if CODEC_X264_BGRA
// Allocate plane buffers for a BGRA picture, then describe the layout:
// packed BGRA is a single plane of 4 bytes per pixel.
x264_picture_alloc(_in_pic, X264_CSP_BGRA, _x264_param->i_width, _x264_param->i_height);
_in_pic->img.i_csp = X264_CSP_BGRA;
_in_pic->img.i_plane = 1;
_in_pic->img.i_stride[0] = 4 * _x264_param->i_width;
#else
// Planar YUV 4:2:0: three planes (Y, U, V).
x264_picture_alloc(_in_pic, X264_CSP_I420, _x264_param->i_width, _x264_param->i_height);
_in_pic->img.i_csp = X264_CSP_I420;
_in_pic->img.i_plane = 3;
#endif
// NOTE(review): buffers from x264_picture_alloc() should be released with
// x264_picture_clean() when the encoder shuts down — confirm that happens.
把获取到的数据按相应格式拷贝到编码结构体x264_picture_t,然后进行编码并输出编码后的数据
/**
 * Encode one raw frame and return the compressed H.264 bitstream.
 *
 * @param dataptr    input frame in I420 layout (Y plane, then U, then V),
 *                   width*height*3/2 bytes.
 * @param bufdata    out: newly allocated buffer (new char[]) holding the
 *                   concatenated NAL units; ownership passes to the caller,
 *                   who must delete[] it. Left untouched when no output is
 *                   produced this call.
 * @param buflen     out: number of valid bytes in bufdata (set only when
 *                   output is produced, matching the original behavior).
 * @param isKeyFrame out: non-zero when the produced frame is a keyframe
 *                   (set only when the encoder exists).
 */
void x264Encode::startEncoder(uint8_t * dataptr, char *&bufdata,int &buflen, int &isKeyFrame)
{
    // NOTE(review): width takes the stored height and vice versa — this swap
    // is copied from the original code; confirm it matches the 90-degree
    // rotation applied on the capture side.
    int width = xheight;
    int height = xwidth;
#if CODEC_X264_BGRA
    // Lazily allocate scratch buffers for the I420 -> ARGB conversion; they
    // are cached in members and reused across frames.
    if (src_data_y == NULL) {
        src_data_y = (uint8_t *)malloc(width * height);
    }
    if (src_data_u == NULL) {
        src_data_u = (uint8_t *)malloc(width / 2 * height / 2);
    }
    if (src_data_v == NULL) {
        src_data_v = (uint8_t *)malloc(width / 2 * height / 2);
    }
    if (dst_data == NULL) {
        dst_data = (uint8_t *)malloc(width * height * 4);
    }
    // Split the packed I420 input into its three planes.
    memcpy(src_data_y, dataptr, width * height);
    memcpy(src_data_u, dataptr + width * height, width * height / 4);
    memcpy(src_data_v, dataptr + width * height * 5 / 4, width * height / 4);
    libyuv::I420ToARGB(src_data_y, width, src_data_u, width / 2,
                       src_data_v, width / 2, dst_data, width * 4, width, height);
    memcpy(_in_pic->img.plane[0], dst_data, width * height * 4);
#else
    // I420 path: copy the Y, U and V planes straight into the input picture.
    memcpy(_in_pic->img.plane[0], dataptr, width * height);
    memcpy(_in_pic->img.plane[1], dataptr + width * height, width * height / 4);
    memcpy(_in_pic->img.plane[2], dataptr + width * height * 5 / 4, width * height / 4);
#endif
    _in_pic->i_type = X264_TYPE_AUTO;   // let x264 choose the frame type
    _in_pic->i_qpplus1 = 0;             // 0 = QP decided by rate control
    _in_pic->param = _x264_param;
    // NOTE(review): i_pts is reset to 0 on every call, so the increment after
    // encoding never accumulates across frames. Kept as-is to preserve
    // behavior, but x264 normally expects a monotonically increasing pts.
    _in_pic->i_pts = 0;

    x264_nal_t *nal;
    int i_nal = 0;
    // BUGFIX: Result was read uninitialized (undefined behavior) when
    // _x264_encoder was NULL; initialize it to the failure value instead.
    int Result = -1;
    if (_x264_encoder != NULL) {
        Result = x264_encoder_encode(_x264_encoder, &nal, &i_nal, _in_pic, _out_pic);
        isKeyFrame = _out_pic->b_keyframe;
        _in_pic->i_pts++;
    }
    if (Result < 0) {
        LOGE("=======encoder faild=========");
    }
    else if (Result == 0)
    {
        // Frame accepted but buffered inside the encoder; no output yet.
    }
    else
    {
        LOGE("=======encoder succees=========");
        // Concatenate all returned NAL units into one caller-owned buffer.
        int bufsize = 0;
        for (int i = 0; i < i_nal; i++) {
            bufsize += nal[i].i_payload;
        }
        char * tempdata = new char[bufsize];  // ownership passes to the caller
        memset(tempdata, 0, bufsize);
        bufdata = tempdata;
        for (int i = 0; i < i_nal; i++) {
            if (nal[i].p_payload != NULL) {
                memcpy(tempdata, nal[i].p_payload, nal[i].i_payload);
                tempdata += nal[i].i_payload;
            }
        }
        buflen = bufsize;
    }
}
编码结束退出前进行刷新,把编码器中所有缓存的数据编码输出
/**
 * Drain the encoder at shutdown.
 *
 * BUGFIX: the original issued a single encode call, which does not flush
 * frames still buffered by the encoder (rc.i_lookahead = 10 buffers up to
 * ten), and it dereferenced _x264_encoder without a NULL check. Passing a
 * NULL input picture asks x264 for delayed output; loop until it reports
 * nothing more to emit.
 */
void x264Encode::Flush()
{
    if (_x264_encoder == NULL) {
        return;
    }
    x264_picture_t pic_out;
    x264_nal_t * nal;
    int i_nal;
    // x264_encoder_encode() returns the payload size (> 0) while delayed
    // frames remain, 0 when drained, < 0 on error. The flushed NAL data is
    // intentionally discarded, matching the original behavior.
    while (x264_encoder_encode(_x264_encoder, &nal, &i_nal, NULL, &pic_out) > 0) {
    }
}
ndk接口
/**
 * Thin JNI wrapper around the native "x264encoder" library.
 * Raw frames pushed via {@link #PushOriStream} come back, encoded as H.264
 * NAL data, through the registered {@link listener}.
 */
public class x264Tool {
    /** Receives encoded H.264 data produced by the native encoder. */
    public interface listener
    {
        void h264data(byte[] buffer, int length);
    }

    private listener _listener;

    public x264Tool(listener l){
        _listener = l;
    }

    static {
        System.loadLibrary("x264encoder");
    }

    /** Pushes one raw (I420) frame of {@code length} bytes to the encoder. */
    public void PushOriStream(byte[] buffer, int length, long time)
    {
        encoderH264(buffer, length, time);
    }

    public native void initX264Encode(int width, int height, int fps, int bite);

    public native int encoderH264(byte[] buffer, int length, long time);

    public native void CloseX264Encode();

    /**
     * Upcall invoked from native code with one encoded frame.
     * BUGFIX: guard against a null listener so a missing callback does not
     * crash inside a JNI upcall.
     */
    private void H264DataCallBackFunc(byte[] buffer, int length){
        if (_listener != null) {
            _listener.h264data(buffer, length);
        }
    }
}
Android Camera的调用获取相关的照相机数据
package com.test.x264encoderdemo;
import android.Manifest;
import android.annotation.TargetApi;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import example.mg.x264.x264Tool;
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback,Camera.PreviewCallback {
private SurfaceView surfaceview;
private SurfaceHolder surfaceHolder;
private Camera camera;
private Parameters parameters;
private int width = 640;
private int height = 480;
private int fps = 20;
private int bitrate = 90000;
private x264Tool x264;
private int timespan = 90000 / fps;
private long time;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (ContextCompat.checkSelfPermission(this,
Manifest.permission.READ_CONTACTS)!= PackageManager.PERMISSION_GRANTED||
ContextCompat.checkSelfPermission(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE)!= PackageManager.PERMISSION_GRANTED||
ContextCompat.checkSelfPermission(this,
Manifest.permission.RECORD_AUDIO)!= PackageManager.PERMISSION_GRANTED||
ContextCompat.checkSelfPermission(this,
Manifest.permission.INTERNET)!= PackageManager.PERMISSION_GRANTED||
ContextCompat.checkSelfPermission(this,
Manifest.permission.CAMERA)!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.RECORD_AUDIO,
Manifest.permission.READ_CONTACTS,
Manifest.permission.INTERNET,
Manifest.permission.CAMERA}, 100);
}
surfaceview = (SurfaceView)findViewById(R.id.surfaceview);
surfaceHolder = surfaceview.getHolder();
surfaceHolder.addCallback(this);
x264 = new x264Tool(l);
createfile();
}
private x264Tool.listener l = new x264Tool.listener(){
@Override
public void h264data(byte[] buffer, int length) {
// TODO Auto-generated method stub
try {
//Log.e("========","=======length==="+length);
outputStream.write(buffer, 0, buffer.length);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
};
private static String path ="/storage/emulated/0/DCIM/aa/test.h264";
private BufferedOutputStream outputStream;
FileOutputStream outStream;
private void createfile(){
File file = new File(path);
if(file.exists()){
file.delete();
}
try {
outputStream = new BufferedOutputStream(new FileOutputStream(file));
} catch (Exception e){
e.printStackTrace();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// TODO Auto-generated method stub
time += timespan;
byte[] yuv420 = new byte[width*height*3/2];
YUV420SP2YUV420(data,yuv420,width,height);
x264.PushOriStream(yuv420, yuv420.length, time);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
x264.initX264Encode(width, height, fps, bitrate);
camera = getBackCamera();
startcamera(camera);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// TODO Auto-generated method stub
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
if (null != camera) {
camera.setPreviewCallback(null);
camera.stopPreview();
camera.release();
camera = null;
}
x264.CloseX264Encode();
try {
outputStream.flush();
outputStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private void startcamera(Camera mCamera){
if(mCamera != null){
try {
mCamera.setPreviewCallback(this);
mCamera.setDisplayOrientation(90);
if(parameters == null){
parameters = mCamera.getParameters();
}
parameters = mCamera.getParameters();
parameters.setPreviewFormat(ImageFormat.NV21);
parameters.setPreviewSize(width, height);
mCamera.setParameters(parameters);
mCamera.setPreviewDisplay(surfaceHolder);
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
}
Bitmap getBitmap(byte[] data,int height,int width){
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
ByteBuffer bufffer = ByteBuffer.wrap(data);
//bufffer.position(0);
bufffer.rewind();
bufffer.position(0);
bitmap.copyPixelsFromBuffer(bufffer);
return bitmap;
}
@TargetApi(9)
private Camera getBackCamera() {
Camera c = null;
try {
c = Camera.open(0); // attempt to get a Camera instance
} catch (Exception e) {
e.printStackTrace();
}
return c; // returns null if camera is unavailable
}
private void YUV420SP2YUV420(byte[] yuv420sp, byte[] yuv420, int width, int height)
{
if (yuv420sp == null ||yuv420 == null)return;
int framesize = width*height;
int i = 0, j = 0;
//copy y
for (i = 0; i < framesize; i++)
{
yuv420[i] = yuv420sp[i];
}
i = 0;
for (j = 0; j < framesize/2; j+=2)
{
yuv420[i + framesize*5/4] = yuv420sp[j+framesize];
i++;
}
i = 0;
for(j = 1; j < framesize/2;j+=2)
{
yuv420[i+framesize] = yuv420sp[j+framesize];
i++;
}
}
}