Controlling the Output Frame Rate of the Android Camera Preview

If you encode H.264 with MediaCodec (hardware encoding), the encoder's output frame rate can be controlled as follows:

        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);      // target frame rate
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // one I-frame per second
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            e.printStackTrace();
            return; // cannot continue without an encoder
        }
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
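
Note that KEY_FRAME_RATE is largely a hint for the encoder's rate control: in ByteBuffer input mode the codec encodes one output frame per input frame, so the real output frame rate is whatever rate you queue frames at, and the presentation timestamps should reflect it. A minimal sketch of feeding one camera frame (frameData and frameIndex are assumptions standing in for the surrounding code):

        // Sketch: queue one raw NV21 frame; the timestamps define the effective fps.
        int inputIndex = mediaCodec.dequeueInputBuffer(10000); // wait up to 10 ms
        if (inputIndex >= 0) {
            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputIndex); // API 21+
            inputBuffer.clear();
            inputBuffer.put(frameData); // raw YUV bytes from onPreviewFrame
            long ptsUs = frameIndex++ * 1000000L / 30; // 30 fps spacing
            mediaCodec.queueInputBuffer(inputIndex, 0, frameData.length, ptsUs, 0);
        }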

But if you instead grab the preview frames (bitmaps) in preview mode and encode them in software with x264, the application layer has to control the preview frame rate itself. The Camera class does provide two interfaces for this, but adaptation across multiple platforms shows that essentially neither can effectively control the camera's preview output frame rate:

setPreviewFrameRate has existed since API level 1, but simply calling it does not make the camera capture that many frames per second.
For example, when I set it to 2, the camera did not capture only 2 frames per second; the logs looked nothing like the expected 2 fps. Checking the documentation, I found the method has been deprecated.

API level 9 added setPreviewFpsRange(int min, int max), which bounds the preview frame rate between min and max, with both values scaled by 1000 (so 15 fps is written as 15000).
In newer SDK versions this method replaces the old setPreviewFrameRate.
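
For reference, a minimal sketch of using this API (the camera variable is assumed; passing a range the device does not report throws an exception, so always query the supported ranges first):

	Camera.Parameters params = camera.getParameters();
	// Values are scaled by 1000: {15000, 30000} means 15..30 fps.
	List<int[]> ranges = params.getSupportedPreviewFpsRange();
	int[] range = ranges.get(ranges.size() - 1); // typically sorted ascending; take the highest
	params.setPreviewFpsRange(range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
			range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
	camera.setParameters(params);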



==================== 2017-05-15 Update ====================

I overcomplicated this problem at the time; simply throttling the preview-frame callback achieves the goal. Code:

	private final static int MAX_FPS = 15;    // 15 fps is sufficient for a video call
	private final static int FRAME_PERIOD = (1000 / MAX_FPS); // the frame period in ms
	long lastTime = 0;
	long timeDiff = 0;
	int framesSkipped = 0;  // number of frames dropped
	int framesReceived = 0; // number of frames received from the camera
	int framesSent = 0;     // number of frames actually forwarded

	private PreviewCallback previewCallback = new PreviewCallback() {

		public void onPreviewFrame(byte[] _data, Camera _camera) {
			timeDiff = System.currentTimeMillis() - lastTime;
			framesReceived++;
			if (timeDiff < FRAME_PERIOD) {
				// Too soon since the last forwarded frame: drop this one.
				framesSkipped++;

				if (NgnProxyVideoProducer.sAddCallbackBufferSupported) {
					// do not use "_data" which could be null (e.g. on GSII)
					NgnCameraProducer.addCallbackBuffer(_camera,
							_data == null ? mVideoCallbackData : _data);
				}
				Log.d(TAG, "framesSkipped:" + framesSkipped + ", framesReceived:" + framesReceived + ", framesSent:" + framesSent);
				return;
			}
			lastTime = System.currentTimeMillis();
			framesSent++;

			// ... process the frame here (encode / send).
		}
	};
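
For completeness, this is roughly how such a callback gets registered with a reusable buffer in the plain android.hardware.Camera API (the NgnCameraProducer wrapper used above does something equivalent internally; treat this as a sketch):

	Camera.Parameters params = camera.getParameters();
	Camera.Size size = params.getPreviewSize();
	// NV21 uses 12 bits per pixel, so this is width * height * 3 / 2.
	int bufferSize = size.width * size.height
			* ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
	camera.addCallbackBuffer(new byte[bufferSize]);
	camera.setPreviewCallbackWithBuffer(previewCallback);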




==================== 2017-05-15 Update ====================



The section above, from the article "Android camera 预览帧数和视频通话图片缓存" (on Android camera preview frame rates and video-call image buffering), explains why setting the preview frame rate fails and gives one way to throttle preview frames. Here is another, similar implementation: a lock-free ring queue.

The RingBuffer definition:

	final byte STATU_INIT = 0;        // slot is free and can be written
	final byte STATU_WAIT_DEQEUE = 1; // slot holds a frame waiting to be consumed
	class UserDefinedBuffer {
		ByteBuffer mVideoFrame;
		byte status;
	}
	// the ring queue
	class RingBuffer {
		int r_index;
		int w_index;
		int size;
		UserDefinedBuffer[] mUserDefinedBuffer;
		long last_time;

		public RingBuffer(int max_size, int capacity) {
			// max_size must be a power of two: indices wrap with "& (size - 1)"
			mUserDefinedBuffer = new UserDefinedBuffer[max_size];
			r_index = w_index = 0;
			size = max_size;
			for (int i = 0; i < max_size; i++) {
				mUserDefinedBuffer[i] = new UserDefinedBuffer();
				mUserDefinedBuffer[i].mVideoFrame = ByteBuffer.allocateDirect(capacity);
			}
		}

		public UserDefinedBuffer getUserDefinedBuffer(int index) {
			return mUserDefinedBuffer[index];
		}

		int getRingW() {
			return w_index;
		}

		int getRingR() {
			return r_index;
		}

		int getRingSize() {
			return size;
		}

		void setUserDefinedBufferStatus(int index, byte status) {
			synchronized (mUserDefinedBuffer[index]) {
				mUserDefinedBuffer[index].status = status;
			}
		}

		byte getUserDefinedBufferStatus(int index) {
			synchronized (mUserDefinedBuffer[index]) {
				return mUserDefinedBuffer[index].status;
			}
		}

		void enqueue(byte[] _data) {
			int index = w_index & (size - 1); // wrapped, always < size
			Log.i(TAG, "enqueue(byte[]), index:" + index);
			if (getUserDefinedBufferStatus(index) != STATU_INIT) {
				// slot not consumed yet: the ring is full, drop this frame
				Log.i(TAG, "enqueue(byte[]), index:" + index + " not dequeued, STATUS:" + getUserDefinedBufferStatus(index));
				return;
			}
			setUserDefinedBufferStatus(index, STATU_WAIT_DEQEUE);
			mUserDefinedBuffer[index].mVideoFrame.rewind();
			mUserDefinedBuffer[index].mVideoFrame.put(_data);
			w_index += 1;
		}

		void enqueue(ByteBuffer data) {
			int index = w_index & (size - 1); // wrapped, always < size
			Log.i(TAG, "enqueue(ByteBuffer), index:" + index);
			if (getUserDefinedBufferStatus(index) != STATU_INIT) {
				// slot not consumed yet: the ring is full, drop this frame
				Log.i(TAG, "enqueue(ByteBuffer), index:" + index + " not dequeued, STATUS:" + getUserDefinedBufferStatus(index));
				return;
			}
			setUserDefinedBufferStatus(index, STATU_WAIT_DEQEUE);
			mUserDefinedBuffer[index].mVideoFrame.rewind();
			mUserDefinedBuffer[index].mVideoFrame.put(data);
			w_index += 1;
		}

		long getLastTime() {
			return last_time;
		}

		// returns the slot index to read, or -1 if the ring is empty
		int dequeue() {
			int index = r_index & (size - 1);
			if (index == (w_index & (size - 1))) {
				Log.i(TAG, "dequeue, empty, w_index:" + w_index + ", r_index:" + r_index);
				return -1;
			}
			Log.i(TAG, "dequeue, index:" + index);
			r_index += 1;
			return index;
		}
	};
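
A note on why this can run without a global lock: the preview callback (producer) only ever advances w_index, and the push thread (consumer) only ever advances r_index, while each slot's status byte is guarded by its own monitor. With exactly one producer and one consumer that is enough. The & (size - 1) masking is also why the ring size must be a power of two.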

The dequeue (consumer) thread:

	class PushVideoThread extends Thread {
		boolean mExitFlag = false;

		public void setExitFlg(boolean bFlag) {
			mExitFlag = bFlag;
		}

		@Override
		public void run() {
			android.os.Process
					.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

			Log.i(TAG, "PushVideoThread() run start.");
			final int delay = (1000 / mFps); // frame period in ms
			while (!mExitFlag) {
				long start = System.currentTimeMillis();
				if (mRingBuffer == null) {
					sleepQuietly(delay);
					continue;
				}
				int index = mRingBuffer.dequeue();
				if (index == -1) {
					// ring is empty: wait one frame period and retry
					sleepQuietly(delay);
					continue;
				}
				if (STATU_WAIT_DEQEUE != mRingBuffer.getUserDefinedBufferStatus(index)) {
					Log.i(TAG, "dequeue, unexpected status:" + mRingBuffer.getUserDefinedBufferStatus(index));
					sleepQuietly(delay);
					continue;
				}
				UserDefinedBuffer userDefinedBuffer = mRingBuffer.getUserDefinedBuffer(index);

				ByteBuffer byteBuffer = userDefinedBuffer.mVideoFrame;
				if (byteBuffer != null) {
					framesRendered++;
					if ((framesRendered % 100) == 0) {
						logStatistics();
						framesRendered = 0;
						startTimeNs = System.nanoTime();
					}
					mProducer.push(byteBuffer, byteBuffer.capacity());
					mRingBuffer.setUserDefinedBufferStatus(index, STATU_INIT); // mark the slot free again
				}

				// keep the loop close to one iteration per frame period
				long end = System.currentTimeMillis();
				if ((end - start) < delay && (end - start) > 0) {
					sleepQuietly(delay - (end - start));
				}
			}
			Log.i(TAG, "PushVideoThread() run End.");
		}

		private void sleepQuietly(long ms) {
			try {
				Thread.sleep(ms);
			} catch (InterruptedException e) {
				e.printStackTrace();
			}
		}
	}

RingBuffer initialization:

	static final int MAX_SIZE = 64; // must be a power of two (the ring masks indices with size - 1)
	RingBuffer mRingBuffer;
	void initRingBuffer() {
		mRingBuffer = new RingBuffer(MAX_SIZE, mVideoFrame.capacity());
	}
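
If MAX_SIZE ever needs to be computed instead of hard-coded, it must be rounded up to a power of two first. A small hypothetical helper (not part of the original code):

	// Hypothetical helper: smallest power of two >= n, for n >= 1.
	static int nextPowerOfTwo(int n) {
		return (n <= 1) ? 1 : Integer.highestOneBit(n - 1) << 1;
	}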
	
Enqueue (in the preview callback):

	public void onPreviewFrame(byte[] _data, Camera _camera) {
		mRingBuffer.enqueue(_data);
	}
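
Putting the pieces together, the wiring might look like this (a sketch: camera, startPreview and stopPreview are assumptions about the surrounding class):

	PushVideoThread mPushThread;

	void startPreview() {
		initRingBuffer();
		mPushThread = new PushVideoThread();
		mPushThread.start(); // consumer: drains the ring at mFps
		camera.setPreviewCallbackWithBuffer(previewCallback); // producer
	}

	void stopPreview() {
		mPushThread.setExitFlg(true); // let the loop exit
		try {
			mPushThread.join();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}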





==================== 2017-05-15 Update ====================

