Android音视频通话


前言

Android局域网音视频通话。视频编码采用H264;音频编码采用开源库speex进行压缩,并通过webrtc的AECM模块消除回声。由于音频需要实时完成编码与消回声处理,通讯部分采用c++编写。


一、准备工作

1、编写Jni接口

package com.cmy.media;

/**
 * Thin JNI bridge to the native "cmy_media" library.
 *
 * Outbound: Java pushes encoded video ({@code byte[]}) and raw PCM audio
 * ({@code short[]}) down through the {@code sendXxxToRemote} natives.
 * Inbound: native code invokes {@link #onVideoCallback} / {@link #onAudioCallback}
 * by reflection, which forward to the registered {@link Callback}.
 */
public class AVService {

	static {
		System.loadLibrary("cmy_media");
	}

	/** Receives A/V payloads pushed up from the native layer. */
	public interface Callback {

		void onVideoCallback(byte[] data, int len);

		void onAudioCallback(short[] data, int len);

	}

	private Callback callback;

	/** Initializes the native AECM/speex state; true on success. */
	public native boolean init();

	/** Starts the native UDP server thread; true on success. */
	public native boolean startServer();

	/** Stops the native UDP server and joins its worker thread. */
	public native void stopServer();

	/** Sends one encoded H.264 frame to the peer at {@code ip}. */
	public native void sendVideoToRemote(String ip, byte[] buff, int len);

	/** Sends one frame of 16-bit PCM audio to the peer at {@code ip}. */
	public native void sendAudioToRemote(String ip, short[] buff, int len);

	public void setCallback(Callback callback) {
		this.callback = callback;
	}

	/** Called from native code with a received video payload. */
	public void onVideoCallback(byte[] data, int len) {
		Callback cb = callback;
		if (cb != null) {
			cb.onVideoCallback(data, len);
		}
	}

	/** Called from native code with decoded audio samples. */
	public void onAudioCallback(short[] data, int len) {
		Callback cb = callback;
		if (cb != null) {
			cb.onAudioCallback(data, len);
		}
	}

}

2、通过javah工具生成头文件

/* DO NOT EDIT THIS FILE - it is machine generated */
/* Regenerate with `javah com.cmy.media.AVService` (or `javac -h`) whenever
 * the native method declarations in AVService.java change; the symbol names
 * below must match the Java package/class exactly. */
#include <jni.h>
/* Header for class com_cmy_media_AVService */

#ifndef _Included_com_cmy_media_AVService
#define _Included_com_cmy_media_AVService
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     com_cmy_media_AVService
 * Method:    init
 * Signature: ()Z
 */
JNIEXPORT jboolean JNICALL Java_com_cmy_media_AVService_init
  (JNIEnv *, jobject);

/*
 * Class:     com_cmy_media_AVService
 * Method:    startServer
 * Signature: ()Z
 */
JNIEXPORT jboolean JNICALL Java_com_cmy_media_AVService_startServer
  (JNIEnv *, jobject);

/*
 * Class:     com_cmy_media_AVService
 * Method:    stopServer
 * Signature: ()V
 */
JNIEXPORT void JNICALL Java_com_cmy_media_AVService_stopServer
  (JNIEnv *, jobject);

/*
 * Class:     com_cmy_media_AVService
 * Method:    sendVideoToRemote
 * Signature: (Ljava/lang/String;[BI)V
 */
JNIEXPORT void JNICALL Java_com_cmy_media_AVService_sendVideoToRemote
  (JNIEnv *, jobject, jstring, jbyteArray, jint);

/*
 * Class:     com_cmy_media_AVService
 * Method:    sendAudioToRemote
 * Signature: (Ljava/lang/String;[SI)V
 */
JNIEXPORT void JNICALL Java_com_cmy_media_AVService_sendAudioToRemote
  (JNIEnv *, jobject, jstring, jshortArray, jint);

#ifdef __cplusplus
}
#endif
#endif

3、集成speex、webrtc

# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Builds a single shared library (libcmy_media.so) containing the JNI
# bridge plus the speex codec and the webrtc AECM / NS / AGC / VAD sources
# it depends on.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE 	:= cmy_media
# -DWEBRTC_POSIX: POSIX build of the webrtc sources.
# -DFIXED_POINT -DUSE_KISS_FFT: fixed-point speex with the bundled kiss FFT.
# -DEXPORT="" -UHAVE_CONFIG_H: build speex without its autoconf config header.
LOCAL_CFLAGS 	:= -DWEBRTC_POSIX -DFIXED_POINT -DUSE_KISS_FFT -DEXPORT="" -UHAVE_CONFIG_H
AUDIO_SRC_PATH 	:=$(LOCAL_PATH)
LOCAL_CPPFLAGS 	:=-std=c++11
LOCAL_LDLIBS 	:=-llog -lc

LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/webrtc \
$(LOCAL_PATH)/speex/include \
$(LOCAL_PATH)/ \

# JNI bridge and service sources first, then the webrtc modules
# (AECM echo control, signal-processing primitives, noise suppression,
# AGC, VAD, resamplers), then the full speex codec.
LOCAL_SRC_FILES  := \
$(LOCAL_PATH)/webrtc_ns.c \
$(LOCAL_PATH)/SpeexCoder.cpp \
$(LOCAL_PATH)/AVService.cpp \
$(LOCAL_PATH)/com_cmy_media_AVService.cpp \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/aecm/aecm_core.cc \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/aecm/aecm_core_c.cc \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/utility/delay_estimator.cc \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/spl_init.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/ring_buffer.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/real_fft.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/division_operations.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/min_max_operations.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/cross_correlation.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/downsample_fast.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/vector_scaling_operations.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/complex_bit_reverse.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/complex_fft.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/randomization_functions.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/spl_sqrt_floor.c \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/ns/noise_suppression.c \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/ns/noise_suppression_x.c \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/ns/ns_core.c \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/ns/nsx_core.c \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/ns/nsx_core_c.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/fft4g.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/copy_set_operations.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/energy.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/get_scaling_square.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/resample_by_2.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/spl_sqrt.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/dot_product_with_scale.cc \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/agc/utility.cc \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/agc/legacy/analog_agc.c \
$(AUDIO_SRC_PATH)/webrtc/modules/audio_processing/agc/legacy/digital_agc.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/vad/webrtc_vad.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/vad/vad_sp.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/vad/vad_core.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/vad/vad_gmm.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/vad/vad_filterbank.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/resample_48khz.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/resample_by_2_internal.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/resample_fractional.c \
$(AUDIO_SRC_PATH)/webrtc/common_audio/signal_processing/splitting_filter.c \
speex/libspeex/bits.c \
speex/libspeex/buffer.c \
speex/libspeex/cb_search.c \
speex/libspeex/exc_5_64_table.c \
speex/libspeex/exc_5_256_table.c \
speex/libspeex/exc_8_128_table.c \
speex/libspeex/exc_10_16_table.c \
speex/libspeex/exc_10_32_table.c \
speex/libspeex/exc_20_32_table.c \
speex/libspeex/fftwrap.c \
speex/libspeex/filterbank.c \
speex/libspeex/filters.c \
speex/libspeex/gain_table.c \
speex/libspeex/gain_table_lbr.c \
speex/libspeex/hexc_10_32_table.c \
speex/libspeex/hexc_table.c \
speex/libspeex/high_lsp_tables.c \
speex/libspeex/jitter.c \
speex/libspeex/kiss_fft.c \
speex/libspeex/kiss_fftr.c \
speex/libspeex/lpc.c \
speex/libspeex/lsp.c \
speex/libspeex/lsp_tables_nb.c \
speex/libspeex/ltp.c \
speex/libspeex/mdf.c \
speex/libspeex/modes.c \
speex/libspeex/modes_wb.c \
speex/libspeex/nb_celp.c \
speex/libspeex/preprocess.c \
speex/libspeex/quant_lsp.c \
speex/libspeex/resample.c \
speex/libspeex/sb_celp.c \
speex/libspeex/scal.c \
speex/libspeex/smallft.c \
speex/libspeex/speex.c \
speex/libspeex/speex_callbacks.c \
speex/libspeex/speex_header.c \
speex/libspeex/stereo.c \
speex/libspeex/vbr.c \
speex/libspeex/vq.c \
speex/libspeex/window.c


include $(BUILD_SHARED_LIBRARY)

二、初始化工作

/*
 * Class:     com_cmy_media_AVService
 * Method:    init
 * Signature: ()Z
 */
JNIEXPORT jboolean JNICALL Java_com_cmy_media_AVService_init(JNIEnv *env,
		jobject obj) {
	// Cache the JavaVM and a global ref to the Java AVService object so the
	// native worker thread can attach and call back into Java later.
	// NOTE(review): repeated init() calls would leak the previous global
	// ref — verify callers invoke this only once.
	env->GetJavaVM(&g_jvm);
	g_obj = env->NewGlobalRef(obj);
	g_aecmInst = WebRtcAecm_Create();
	if (g_aecmInst == NULL) {
		LOGE("WebRtcAecm_Create fail");
		return false;
	}
	if (WebRtcAecm_Init(g_aecmInst, g_nAudioRate) == -1) {
		LOGE("WebRtcAecm_Init fail");
		WebRtcAecm_Free(g_aecmInst);
		g_aecmInst = NULL;
		return false;
	}
	if (!g_speex.Init()) {
		LOGE("Speex Init fail");
		// BUGFIX: release the AECM instance created above instead of
		// leaking it when the speex codec fails to initialize.
		WebRtcAecm_Free(g_aecmInst);
		g_aecmInst = NULL;
		return false;
	}
	LOGE("system init success");
	return true;
}

三、开启socket udp服务

/*
 * Class:     com_cmy_media_AVService
 * Method:    startServer
 * Signature: ()Z
 */
JNIEXPORT jboolean JNICALL Java_com_cmy_media_AVService_startServer(
		JNIEnv * env, jobject obj) {
	// Delegate to the service object, which spawns the UDP worker thread.
	// BUGFIX: propagate the actual result instead of always returning true,
	// so the Java side can detect a failed thread start.
	return service.startServer();
}
// Spawns the UDP worker thread. Returns false if the thread could not be
// created. The server socket itself is opened inside WorkThread(), so on
// failure there is normally nothing to close here.
bool AVService::startServer() {

	if (pthread_create(&m_workThread, NULL, stWorkThread, this) != 0) {
		// BUGFIX: previous message named a non-existent "stEpollWaitThread",
		// and close() was called on a socket that has not been opened yet.
		LOGE("work thread create fail");
		if (m_svrSocket != -1) {
			close(m_svrSocket);
			m_svrSocket = -1;
		}
		return false;
	}
	return true;

}

void *AVService::stWorkThread(void *arg) {
	AVService *pvs = (AVService*) arg;
	pvs->WorkThread();
	return NULL;
}

// Worker-thread body: creates and binds the UDP server socket, then polls
// it with select() (1 s timeout, so stopServer() is noticed promptly) and
// dispatches readable data to OnSocketRead().
void AVService::WorkThread() {

	m_svrSocket = socket(AF_INET, SOCK_DGRAM, 0);
	if (m_svrSocket < 0) {
		LOGE( " create socket error: %s(errno: %d)", strerror(errno), errno);
		return;
	}
	struct sockaddr_in addr_serv;
	memset(&addr_serv, 0, sizeof(struct sockaddr_in));
	addr_serv.sin_family = AF_INET;
	addr_serv.sin_port = htons(SERVER_PORT);
	// Listen on all interfaces (INADDR_ANY is all-zero, so htonl is a no-op).
	addr_serv.sin_addr.s_addr = INADDR_ANY;

	LOGE( " binding...");
	if (bind(m_svrSocket, (struct sockaddr *) &addr_serv, sizeof(addr_serv))
			< 0) {
		LOGE( " bind error: %s(errno: %d)", strerror(errno), errno);
		// BUGFIX: close the socket on bind failure instead of leaking it.
		close(m_svrSocket);
		m_svrSocket = -1;
		return;
	}

	LOGE( " starting receive...");
	struct timeval tv_out;
	fd_set fd_read;

	while (1) {
		// m_svrSocket is set to -1 by stopServer()/OnSocketRead() on shutdown.
		if (m_svrSocket == -1) {
			LOGE("Exit  WorkThread");
			return;
		}
		// select() modifies both the timeout and the fd set, so both must be
		// re-initialized every iteration.
		tv_out.tv_sec = 1;
		tv_out.tv_usec = 0;
		FD_ZERO(&fd_read);
		FD_SET(m_svrSocket, &fd_read);
		if (select(m_svrSocket + 1, &fd_read, NULL, NULL, &tv_out) == -1) {
			// BUGFIX: after ANY select() error the fd set is indeterminate;
			// the old code only skipped on EINTR and otherwise fell through
			// to FD_ISSET on garbage. Retry the loop on every error.
			continue;
		}

		if (FD_ISSET(m_svrSocket, &fd_read)) {
			OnSocketRead();
		} else {
			LOGE("waiting read...");
		}

	}

}

四、判断socket是否可读

if (FD_ISSET(m_svrSocket, &fd_read)) {
			OnSocketRead();
		} 


// Reads one datagram from the server socket. The first byte is a type tag
// added by the sender ('0' = video, anything else = audio); the remainder
// is forwarded to the matching callback. Returns 0 on success/EAGAIN,
// -1 on a fatal receive error (socket is then shut down).
int AVService::OnSocketRead() {
	struct sockaddr_in from;
	const int receiveCount = 64 * 1024;
	unsigned char recvBuffer[receiveCount];
	// BUGFIX: recvfrom() takes a socklen_t*, not an int* — passing an int*
	// does not compile cleanly (or is UB) where the widths differ.
	socklen_t addrlen = sizeof(struct sockaddr_in);
	int ret = recvfrom(m_svrSocket, recvBuffer, receiveCount, 0,
			(struct sockaddr *) &from, &addrlen);
	if (ret > 0) {
		// Inspect the type tag directly — no need for the old 1-byte memcpy.
		if (recvBuffer[0] == '0')
			m_pVideoCallBack(recvBuffer + sizeof(char), ret - sizeof(char));
		else
			m_pAudioCallBack(recvBuffer + sizeof(char), ret - sizeof(char));
	} else {
		if (errno == EAGAIN)
			return 0;

		LOGE("OnSocketRead recv error: %s(errno: %d)", strerror(errno), errno);
		shutdown(m_svrSocket, SHUT_RDWR);
		close(m_svrSocket);
		m_svrSocket = -1;
		return -1;
	}
	return 0;
}

// Hands a received, still-encoded video payload up to Java
// (AVService.onVideoCallback(byte[], int)) for MediaCodec decoding.
// Runs on the native worker thread, so it must attach to the JVM first.
void CALLBACK onVideoCallback(unsigned char* pData, int nLen) {
	JNIEnv *env;
	g_jvm->AttachCurrentThread(&env, NULL);
	jclass cls = env->GetObjectClass(g_obj);

	jmethodID mid = env->GetMethodID(cls, "onVideoCallback", "([BI)V");
	if (mid == NULL) {
		LOGE("find method onServerVideo fail");
		// BUGFIX: release the local ref and detach before returning;
		// the old early return left this thread attached to the JVM.
		env->DeleteLocalRef(cls);
		g_jvm->DetachCurrentThread();
		return;
	}

	jbyteArray jbarray = env->NewByteArray(nLen);
	jbyte *jy = (jbyte*) pData;
	env->SetByteArrayRegion(jbarray, 0, nLen, jy);

	env->CallVoidMethod(g_obj, mid, jbarray, nLen); // back to Java for decoding
	env->DeleteLocalRef(jbarray);
	env->DeleteLocalRef(cls);
	g_jvm->DetachCurrentThread();
}

// Handles a received audio payload: speex-decodes it, feeds the decoded
// 160-sample frame to AECM as the far-end reference, then hands the PCM up
// to Java (AVService.onAudioCallback(short[], int)) for playback.
// Runs on the native worker thread, so it must attach to the JVM first.
void CALLBACK onAudioCallback(unsigned char* pData, int nLen) {

	JNIEnv *env;
	g_jvm->AttachCurrentThread(&env, NULL);
	jclass cls = env->GetObjectClass(g_obj);

	jmethodID mid = env->GetMethodID(cls, "onAudioCallback", "([SI)V");
	if (mid == NULL) {
		LOGE("find method onClientAudio fail");
		// BUGFIX: detach before returning — the old early return leaked
		// the JVM attachment (and the cls local ref) on this thread.
		env->DeleteLocalRef(cls);
		g_jvm->DetachCurrentThread();
		return;
	}
	LOGE("onClientAudio.len->%d", nLen);
	char data[nLen];
	memcpy(data, pData, nLen);

	g_speex.Decoder(data, nLen, m_speexDecBuf); // speex decode -> 16-bit PCM

	// Buffer the decoded frame as the AECM far-end (loudspeaker) signal.
	// 160 samples = one 20 ms frame at 8 kHz — assumes the decoder always
	// produces exactly one such frame; TODO confirm against CSpeexCoder.
	if (WebRtcAecm_BufferFarend(g_aecmInst, m_speexDecBuf, 160) != 0) {
		LOGE("WebRtcAecm_BufferFarend fail len=%d", 160);
		// BUGFIX: detach on this early-return path as well.
		env->DeleteLocalRef(cls);
		g_jvm->DetachCurrentThread();
		return;
	}

	jshortArray arr = env->NewShortArray(160);
	env->SetShortArrayRegion(arr, 0, 160, (const jshort*) m_speexDecBuf);
	env->CallVoidMethod(g_obj, mid, arr, 160);
	env->DeleteLocalRef(arr);
	env->DeleteLocalRef(cls);
	g_jvm->DetachCurrentThread();
}

初始化解码器


	/**
	 * Creates, configures and starts the H.264 decoder, rendering to the
	 * given Surface. Returns 0 on success, -1 on any failure.
	 */
	public int init(Surface surface, int width1, int height1)
	{
		Log.e("VideoClient", "---Decoder init---");
		try
		{
			m_nCount = 0;
			mSurface = surface;
			decoder = MediaCodec.createDecoderByType("video/avc");
			Log.e("VideoClient", "---Decoder createDecoderByType()---");
			// BUGFIX: use the width1/height1 parameters the caller passed in;
			// the original referenced width/height fields and silently ignored
			// its arguments. (Confirm no caller relied on pre-set fields.)
			MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width1, height1);
			decoder.configure(mediaFormat, mSurface, null, 0);
			Log.e("VideoClient", "---Decoder configure()---");
			decoder.start();
			Log.e("VideoClient", "---Decoder start()---");
		}
		catch(Exception e)
		{
			Log.e("VideoClient1", "---Decoder init----Exception:"+e);
			Log.e("VideoClient1", "---Decoder init----Exception3:"+e.getMessage());
			e.printStackTrace();
			return -1;
		}
		return 0;
	}

视频解码

// Feed one received H.264 access unit (buff[0..nLen)) into the decoder and
// render every frame that is ready to the Surface.
ByteBuffer[] inputBuffers = decoder.getInputBuffers();

			ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
			//Log.e("VideoClient","dequeueInputBuffer");
			int inputBufferIndex = decoder.dequeueInputBuffer(0);//was -1 (block forever) before; 0 = don't wait
			//Log.e("VideoClient","inputBufferIndex:"+inputBufferIndex);
			if (inputBufferIndex >= 0)
			{
				ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
				inputBuffer.clear();
				inputBuffer.put(buff, 0, nLen);

				long nts = m_nCount*1000000/15;//presentation time in µs at 15 fps — NOTE(review): may need 25 to match the capture rate
				if (nts <= 0)
				{
					// Overflow / restart guard: reset the frame counter.
					nts = 0;
					m_nCount = 0;
				}
				decoder.queueInputBuffer(inputBufferIndex, 0, nLen, nts, 0);
				m_nCount++;
			}
			// Drain all completed output buffers; `true` renders to the Surface.
			MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
			int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo,0);
			while (outputBufferIndex >= 0)
			{
				decoder.releaseOutputBuffer(outputBufferIndex, true);
				outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);
			}

五、发送数据到远端

所有数据需经过处理后再传输


	
/*
 * Class:     com_cmy_media_AVService
 * Method:    sendVideoToRemote
 * Signature: (Ljava/lang/String;[BI)V
 *
 * Copies one encoded H.264 frame out of the Java array and sends it to
 * `ip` tagged as video ('0').
 */
JNIEXPORT void JNICALL Java_com_cmy_media_AVService_sendVideoToRemote
(JNIEnv * env, jobject obj,jstring ip, jbyteArray data, jint len) {
	char pBuf[300 * 1024];
	// BUGFIX: reject invalid/oversized frames before GetByteArrayRegion,
	// which would otherwise smash the 300 KiB stack buffer.
	if (len <= 0 || len > (jint) sizeof(pBuf)) {
		LOGE("sendVideoToRemote invalid len=%d", len);
		return;
	}
	char serverIp[50];
	memset(serverIp, 0, sizeof(serverIp));
	if (!jstringTostring1(env, ip, serverIp, sizeof(serverIp)))
	return;
	env->GetByteArrayRegion(data, 0, len, (jbyte*)pBuf);

	char type = '0'; // '0' marks the payload as video on the wire
	service.sendDataToRemote(serverIp,pBuf,len,type);
}

/*
 * Class:     com_cmy_media_AVService
 * Method:    sendAudioToRemote
 * Signature: (Ljava/lang/String;[SI)V
 *
 * Runs AECM echo cancellation on the captured PCM, speex-encodes the
 * result and sends it to `ip` tagged as audio ('1').
 */
JNIEXPORT void JNICALL Java_com_cmy_media_AVService_sendAudioToRemote
(JNIEnv *env, jobject obj, jstring ip, jshortArray data, jint len) {
	char clientIp[50];
	memset(clientIp, 0, sizeof(clientIp));
	if (!jstringTostring1(env, ip, clientIp, sizeof(clientIp)))
	return;

	// NOTE(review): len is not validated against the capacity of
	// g_outAudioBuf — confirm the Java side always sends one frame.
	env->GetShortArrayRegion(data, 0, len, (jshort*)g_outAudioBuf);

	// Echo cancellation: near-end capture only (far-end reference is
	// buffered in onAudioCallback); 50 ms assumed round-trip delay.
	if (WebRtcAecm_Process(g_aecmInst, g_outAudioBuf, NULL, m_aecmBuf, len, 50)
			!= 0) {
		LOGE("WebRtcAecm_Process fail len=%d", len);
		return;
	}

	int nEncLen = g_speex.Encoder(m_aecmBuf, m_speexEncBuf);
	char szAudio[100];
	// BUGFIX: validate the encoder result before memcpy — a failure (<= 0)
	// or oversized output would otherwise corrupt the 100-byte buffer.
	if (nEncLen <= 0 || nEncLen > (int) sizeof(szAudio)) {
		LOGE("speex Encoder bad len=%d", nEncLen);
		return;
	}
	memcpy(szAudio, m_speexEncBuf, nEncLen);

	char type = '1'; // any non-'0' tag marks the payload as audio
	service.sendDataToRemote(clientIp,szAudio,nEncLen,type);
}
// Prepends a one-byte type tag ('0' = video, anything else = audio) to
// `data` and sends the resulting datagram to `remoteIp`:SERVER_PORT over
// the shared UDP socket. Send failures are logged and dropped.
void AVService::sendDataToRemote(char* remoteIp, char* data, int len,
		char type) {
	if (m_svrSocket < 0) {
		LOGE( "server socket error ->%d", m_svrSocket);
		return;
	}

	struct sockaddr_in peer;
	memset(&peer, 0, sizeof(peer));
	peer.sin_family = AF_INET;
	peer.sin_port = htons(SERVER_PORT);
	peer.sin_addr.s_addr = inet_addr(remoteIp);

	// Build the wire packet: [type tag][payload].
	char packet[len + sizeof(char)];
	packet[0] = type;
	memcpy(packet + sizeof(char), data, len);

	int sent = sendto(m_svrSocket, packet, len + sizeof(char), 0,
			(struct sockaddr *) &peer, sizeof(peer));
	if (sent < 0) {
		LOGE( " send data errno %d\r\n", errno);
		return;
	}

}

1、视频数据

1.1、初始化MediaCodec

// Configure the hardware H.264 (AVC) encoder: 500 kbps, 25 fps, planar
// YUV420 input, one key frame per second.
encoder = MediaCodec.createEncoderByType("video/avc");//h264
			MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", m_width, m_height);
			mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 500000);
			mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
//			mediaFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ);
			mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
			mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
			encoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
			encoder.start();

1.2、通过MediaCodec进行H264编码

视频数据为相机预览回调 编码时需要注意视频数据格式 有些需要转换,网上转换方式很多,这里不重复。否则可能导致绿屏、颜色失真、黑白效果 。由于前期没对视频数据进行正确的转换这些问题都遇到过。

// Encode one camera preview frame (`input`, YUV420) to H.264. Returns the
// encoded bytes, with SPS/PPS prepended to key frames; returns an empty
// array when no output is ready yet.
int nLen = 0;
		//swapYV12toI420(input, yuv420, m_width, m_height);
        System.arraycopy(input,0,yuv420,0,input.length);

		try
		{
			ByteBuffer[] inputBuffers = encoder.getInputBuffers();
			ByteBuffer[] outputBuffers = encoder.getOutputBuffers();
			int inputBufferIndex = encoder.dequeueInputBuffer(-1);
			if (inputBufferIndex >= 0)
			{
				ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
				inputBuffer.clear();
				inputBuffer.put(yuv420);

				long nts = m_nCount*1000000/15;
				if (nts <= 0)
				{
					nts = 0;
					m_nCount = 0;
				}
				// NOTE(review): `nts` is computed above but `m_nCount` is
				// passed as the presentation time — confirm which is intended.
				encoder.queueInputBuffer(inputBufferIndex, 0, yuv420.length, m_nCount, 0);
				m_nCount++;
			}

			MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
			// Preview runs at 25 fps, so wait up to 40 ms; with a 0 timeout
			// outputBufferIndex can be negative, dropping frames and causing
			// mosaic artifacts on the receiver.
			int outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 40000);

			while (outputBufferIndex >= 0)
			{
				ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];

				byte[] outData = new byte[bufferInfo.size];
				outputBuffer.get(outData);

				if (m_info != null)
				{
					// Normal path: append this NAL unit to the output frame.
					System.arraycopy(outData, 0, output, nLen, outData.length);
					nLen += outData.length;
				}
				else
				{
					// SPS/PPS appear only in the very first output buffer;
					// save them for prepending to later key frames.
					ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData);
					if (spsPpsBuffer.getInt() == 0x00000001)
					{
						m_info = new byte[outData.length];
						System.arraycopy(outData, 0, m_info, 0, outData.length);
					}
					else
					{
						return new byte[0];
					}
				}

				encoder.releaseOutputBuffer(outputBufferIndex, false);
				outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);

				// Key frames arrive as 00 00 00 01 65 without SPS/PPS; shift
				// the frame right (via yuv420 as scratch) and prepend m_info.
				if ((output[4] & 0x1F)==5)
				{

					System.arraycopy(output, 0, yuv420, 0, nLen);
					System.arraycopy(m_info, 0, output, 0, m_info.length);
					System.arraycopy(yuv420, 0, output, m_info.length, nLen);
					nLen += m_info.length;
				}
			}
		}
		catch(Exception e)
		{
			e.printStackTrace();
		}

		if (nLen > 0) {
			byte[] out = new byte[nLen];
			System.arraycopy(output, 0, out, 0, nLen);
		//	Log.e("$$$$$$$$$$$$$$$$", "h264Encoder: len->" + out.length);
			return out;
		}
		return new byte[0];

2、音频数据

2.1、webrtc消回声

if (WebRtcAecm_Process(g_aecmInst, g_outAudioBuf, NULL, m_aecmBuf, len, 50)
			!= 0) {
		LOGE("WebRtcAecm_Process fail len=%d", len);
		return;
	}

2.2、speex编码压缩

// Compresses one frame of 16-bit PCM with speex. Writes at most 100 bytes
// into pOutData and returns the number of bytes produced (0 when no
// encoder instance is available).
int CSpeexCoder::Encoder(short *pInputData, char *pOutData)
{
	if (m_pEncoder == NULL)
	{
		return 0;
	}
	speex_bits_reset(&m_EncoderBits);
	speex_encode_int(m_pEncoder, pInputData, &m_EncoderBits);
	//LOGE("speex_encode len=%d",nbBytes);
	return speex_bits_write(&m_EncoderBits, pOutData, 100);
}

六、断开连接

// Stops the UDP service. The socket is shut down and marked -1 FIRST so the
// worker thread's select()/recvfrom() fails and its loop exits; only then is
// the thread joined — reversing the order would deadlock the join.
void AVService::stopServer() {
	LOGE(" disconnect------>");
	if (m_svrSocket != -1) {
		shutdown(m_svrSocket, SHUT_RDWR);
		close(m_svrSocket);
		m_svrSocket = -1;
	}

	// NOTE(review): comparing a pthread_t to -1 is not portable; assumes
	// m_workThread is an integral handle on this platform — confirm.
	if (m_workThread != -1) {
		pthread_join(m_workThread, NULL);
		m_workThread = -1;
	}
}

需要源码请留言

评论 5
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值