文章目录
学习opensl es播放音频
c代码实现基础的opensl es的几种类型[assets、url、pcm]opensl_pcm.c(ps:目前存在的bug是播放url会崩溃,详细请参考:谷歌的NDK)
//--------------------------安卓的log
#include <jni.h>
#include <android/log.h>
#define LOG_TAG "zbv"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
//--------------------------安卓的log
// native audio openSL ES
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
//assets android <sys/type.h>是unix/linux系统基本数据类型的头文件
#include <sys/types.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <stdio.h>
// OpenSL ES engine object and the engine interface it exposes
static SLObjectItf engineObject=NULL;
static SLEngineItf engineEngine;
// output mix
static SLObjectItf outputMixObject = NULL;
static SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
// "stone corridor" environmental reverb preset applied to the output mix
static SLEnvironmentalReverbSettings reverbSettings = SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;
// file descriptor player interfaces (asset playback)
static SLObjectItf fdPlayerObject = NULL;
static SLPlayItf fdPlayerPlay;
static SLSeekItf fdPlayerSeek;
static SLMuteSoloItf fdPlayerMuteSolo;
static SLVolumeItf fdPlayerVolume;
// URI player interfaces
static SLObjectItf uriPlayerObject = NULL;
static SLPlayItf uriPlayerPlay;
static SLSeekItf uriPlayerSeek;
static SLMuteSoloItf uriPlayerMuteSolo;// NOTE(review): MuteSolo may not be available for mono sources -- confirm
static SLVolumeItf uriPlayerVolume;
// buffer queue player interfaces (raw PCM playback)
static SLObjectItf bqPlayerObject = NULL;
static SLPlayItf bqPlayerPlay;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
static SLEffectSendItf bqPlayerEffectSend;
static SLVolumeItf bqPlayerVolume;
// PCM file currently being streamed into the buffer queue
static FILE* pcmFILE;
#define PLAYING_PCM_END 0
// 1 = still playing, PLAYING_PCM_END (0) = end of file reached
static int pcm_play_state=1;
//static short *buffer;
/**
 * Read one chunk (1 second of 44.1 kHz / 16-bit / stereo = 44100*2*2 bytes)
 * of raw PCM from pcmFILE and hand it back through *pcm.
 * Sets *pcm to NULL when no more data can be read.
 *
 * Fixes vs. the original:
 *  - the original returned the address of a local (stack) array, which is
 *    dangling as soon as the function returns; the buffer is now static.
 *  - the original tested `out_buffer == NULL`, which is always false for an
 *    array; the fread() return value is checked instead.
 */
void getPcmData(void **pcm)
{
    // 1 second of 44.1kHz 16-bit stereo audio (44100*2 shorts = 44100*2*2 bytes)
    static short out_buffer[44100 * 2];
    *pcm = NULL;
    if (pcmFILE == NULL || feof(pcmFILE)) {
        LOGD("%s", "read end");
        return;
    }
    size_t bytes_read = fread(out_buffer, 1, sizeof(out_buffer), pcmFILE);
    if (bytes_read == 0) {
        LOGD("%s", "read end");
        return;
    }
    LOGD("%s", "reading");
    *pcm = out_buffer;
}
/**
 * Buffer-queue callback, invoked by OpenSL ES each time a buffer finishes
 * playing. Refills the queue with the next PCM chunk from pcmFILE, and
 * closes the file once end-of-stream is reached.
 *
 * Fixes vs. the original:
 *  - the enqueued buffer must remain valid until OpenSL ES has consumed it,
 *    so it cannot live on this function's stack (the original enqueued a
 *    ~344 KiB local array -- use-after-scope once the callback returned);
 *  - `out_buffer != NULL` was always true for an array, so end-of-stream
 *    was never detected; the fread() return value is checked instead;
 *  - pcmFILE is NULLed after fclose() to prevent a double close.
 */
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    if (bq != bqPlayerBufferQueue) {
        return;
    }
    if (pcm_play_state == PLAYING_PCM_END) {
        // end of stream: close the file exactly once
        if (pcmFILE != NULL) {
            fclose(pcmFILE);
            pcmFILE = NULL;
        }
        return;
    }
    if (pcmFILE == NULL || feof(pcmFILE)) {
        pcm_play_state = PLAYING_PCM_END;
        LOGD("播放over");
        return;
    }
    // 1 second of 44.1kHz 16-bit stereo; static so it outlives the callback
    static short out_buffer[44100 * 2];
    size_t bytes_read = fread(out_buffer, 1, sizeof(out_buffer), pcmFILE);
    if (bytes_read == 0) {
        pcm_play_state = PLAYING_PCM_END;
        LOGD("播放over");
        return;
    }
    SLresult result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, out_buffer, (SLuint32) bytes_read);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("入队失败");
    }
}
/**
 * Create and realize the OpenSL ES engine and the output mix, then try to
 * obtain and configure the environmental-reverb interface on the output mix
 * (failure of the reverb step is non-fatal).
 * Returns JNI_TRUE on success, JNI_FALSE when a mandatory step fails.
 */
JNIEXPORT jboolean Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_createEngine(JNIEnv* env,jclass clazz){
    // ask for a thread-safe engine; interface count 0 means the last two args are ignored
    const SLEngineOption opts[1] = {{(SLuint32) SL_ENGINEOPTION_THREADSAFE,(SLuint32) SL_BOOLEAN_TRUE}};
    SLresult res = slCreateEngine(&engineObject, 1, opts, 0, NULL, NULL);
    if (res != SL_RESULT_SUCCESS) {
        LOGD("opensl es引擎创建初始化失败");
        return JNI_FALSE;
    }
    // Realize moves the object from unrealized to realized; FALSE = synchronous
    res = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    if (res != SL_RESULT_SUCCESS) {
        LOGD("引擎Object实例化失败");
        return JNI_FALSE;
    }
    // fetch the engine interface exposed by the engine object
    res = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    if (res != SL_RESULT_SUCCESS) {
        LOGD("引擎接口获取失败");
        return JNI_FALSE;
    }
    // create the output mix, requesting the environmental-reverb interface
    const SLInterfaceID wanted[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean required[1] = {SL_BOOLEAN_TRUE};
    res = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, wanted, required);
    if (res != SL_RESULT_SUCCESS) {
        LOGD("创建输出混音器失败");
        return JNI_FALSE;
    }
    res = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    if (res != SL_RESULT_SUCCESS) {
        LOGD("输出混音器outout mix实例化失败");
        return JNI_FALSE;
    }
    // reverb is optional: log and continue when it cannot be obtained/configured
    res = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB, &outputMixEnvironmentalReverb);
    if (res == SL_RESULT_SUCCESS) {
        res = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(outputMixEnvironmentalReverb, &reverbSettings);
        if (res != SL_RESULT_SUCCESS) {
            LOGD("混响属性设置失败");
        }
    } else {
        LOGD("获取环境混响接口失败");
    }
    return JNI_TRUE;
}
/**
 * Create an audio player for an asset inside the APK, addressed by file
 * descriptor (the asset must be stored uncompressed for FD access to work).
 * On success obtains the play / seek / mute-solo / volume interfaces.
 * Returns JNI_TRUE on success, JNI_FALSE on any failure.
 *
 * Fixes vs. the original:
 *  - releases the JNI UTF chars on the mgr-failure path (was leaked);
 *  - closes the asset on every path, not only on success (was leaked when
 *    AAsset_openFileDescriptor failed);
 *  - checks `fd < 0` (NDK returns < 0 on failure; 0 is a valid descriptor).
 */
JNIEXPORT jboolean Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_createAssetAudioPlayer(JNIEnv* env,jclass clazz,jobject assetsManager,jstring fileName){
    SLresult result;
    const char* resourcePath = (*env)->GetStringUTFChars(env, fileName, NULL);
    LOGD("resourcePath=%s", resourcePath);
    // use the asset manager to open the asset by filename (NDK r9+)
    AAssetManager* mgr = AAssetManager_fromJava(env, assetsManager);
    if (!mgr) {
        LOGD("获取AssetsManger失败");
        (*env)->ReleaseStringUTFChars(env, fileName, resourcePath);
        return JNI_FALSE;
    }
    AAsset* asset = AAssetManager_open(mgr, resourcePath, AASSET_MODE_UNKNOWN);
    (*env)->ReleaseStringUTFChars(env, fileName, resourcePath);
    if (!asset) {
        LOGD("获取Assets失败");
        return JNI_FALSE;
    }
    // obtain a (dup'ed) file descriptor for the asset; fd stays valid after close
    off_t start, length;
    int fd = AAsset_openFileDescriptor(asset, &start, &length);
    AAsset_close(asset);
    if (fd < 0) {
        LOGD("获取文件描述器失败");
        return JNI_FALSE;
    }
    // audio source: the file-descriptor locator, container type sniffed via MIME
    SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_fd, &format_mime};
    // audio sink: the output mix created by createEngine
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};
    // create the player requesting seek / mute-solo / volume interfaces
    const SLInterfaceID interfaceIds[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME};
    const SLboolean reqs[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &fdPlayerObject, &audioSrc, &audioSnk, 3, interfaceIds, reqs);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("创建audioplayer失败");
        return JNI_FALSE;
    }
    result = (*fdPlayerObject)->Realize(fdPlayerObject, SL_BOOLEAN_FALSE);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("实例化audioplayer失败");
        return JNI_FALSE;
    }
    LOGD("---createAssetAudioPlayer---");
    // play interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_PLAY, &fdPlayerPlay);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取play接口对象失败");
        return JNI_FALSE;
    }
    // seek interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_SEEK, &fdPlayerSeek);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取seek接口对象失败");
        return JNI_FALSE;
    }
    // mute/solo interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_MUTESOLO, &fdPlayerMuteSolo);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取mutesolo接口对象失败");
        return JNI_FALSE;
    }
    // volume interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_VOLUME, &fdPlayerVolume);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取volume接口对象失败");
        return JNI_FALSE;
    }
    return JNI_TRUE;
}
/**
 * Toggle asset playback: PLAYING when isPlaying is true, PAUSED otherwise.
 * No-op until createAssetAudioPlayer has succeeded.
 */
JNIEXPORT void Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_setPlayingAssetAudioPlayer(JNIEnv* env,jclass clazz,jboolean isPlaying){
    if (fdPlayerPlay == NULL) {
        return;
    }
    // play states: SL_PLAYSTATE_PLAYING / SL_PLAYSTATE_PAUSED / SL_PLAYSTATE_STOPPED
    SLuint32 state = isPlaying ? SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_PAUSED;
    SLresult result = (*fdPlayerPlay)->SetPlayState(fdPlayerPlay, state);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("播放或者暂停失败");
    }
}
/**
 * Create an audio player for a URI (e.g. http/file) with MIME-based format
 * sniffing. On success obtains the play / seek / mute-solo / volume
 * interfaces. Returns JNI_TRUE on success, JNI_FALSE on any failure.
 *
 * Fix vs. the original: the JNI UTF chars were only released on the success
 * path of CreateAudioPlayer -- they are now released on every path.
 */
JNIEXPORT jboolean Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_createUriAudioPlayer(JNIEnv* env,jclass clazz,jstring uri){
    SLresult result;
    const char* urlPath = (*env)->GetStringUTFChars(env, uri, NULL);
    LOGD("uri=%s", urlPath);
    // audio source: URI locator + MIME format (SL_DATAFORMAT_MIME / _PCM / _RESERVED3)
    SLDataLocator_URI loc_uri = {SL_DATALOCATOR_URI, (SLchar*)urlPath};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_uri, &format_mime};
    // audio sink: the output mix
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};
    LOGD("to create player");
    // create the player requesting seek / mute-solo / volume interfaces
    const SLInterfaceID interfaceIds[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME};
    const SLboolean reqs[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &uriPlayerObject, &audioSrc, &audioSnk, 3, interfaceIds, reqs);
    LOGD("release string");
    // release the Java string and UTF-8 on every path (fix: leaked on failure)
    (*env)->ReleaseStringUTFChars(env, uri, urlPath);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("创建audioplayer失败");
        return JNI_FALSE;
    }
    LOGD("to realize");
    result = (*uriPlayerObject)->Realize(uriPlayerObject, SL_BOOLEAN_FALSE);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("实例化audioplayer失败");
        return JNI_FALSE;
    }
    LOGD("---createUriAudioPlayer---");
    // play interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_PLAY, &uriPlayerPlay);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取play接口对象失败");
        return JNI_FALSE;
    }
    // seek interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_SEEK, &uriPlayerSeek);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取seek接口对象失败");
        return JNI_FALSE;
    }
    // mute/solo interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_MUTESOLO, &uriPlayerMuteSolo);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取mutesolo接口对象失败");
        return JNI_FALSE;
    }
    // volume interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_VOLUME, &uriPlayerVolume);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("获取volume接口对象失败");
        return JNI_FALSE;
    }
    return JNI_TRUE;
}
/**
 * Toggle URI playback: PLAYING when isPlaying is true, PAUSED otherwise.
 * No-op until createUriAudioPlayer has succeeded.
 */
JNIEXPORT void Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_setPlayingUriAudioPlayer(JNIEnv* env,jclass clazz,jboolean isPlaying){
    if (uriPlayerPlay == NULL) {
        return;
    }
    // play states: SL_PLAYSTATE_PLAYING / SL_PLAYSTATE_PAUSED / SL_PLAYSTATE_STOPPED
    SLuint32 state = isPlaying ? SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_PAUSED;
    SLresult result = (*uriPlayerPlay)->SetPlayState(uriPlayerPlay, state);
    if (result != SL_RESULT_SUCCESS) {
        LOGD("播放或者暂停失败");
    }
}
/**
 * Create a buffer-queue audio player for raw PCM (44.1 kHz, 16-bit, stereo,
 * little-endian), obtain its play / buffer-queue / effect-send / volume
 * interfaces, register the refill callback, and set the player to PLAYING.
 * Returns JNI_TRUE on success, JNI_FALSE on any failure.
 *
 * Fix vs. the original: the RegisterCallback failure path logged the wrong
 * ("play interface") error message.
 */
JNIEXPORT jboolean Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_createBufferQueueAudioPlayer(JNIEnv* env,jclass clazz){
    SLresult result;
    // audio source: an Android simple buffer queue with 2 buffers
    SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    int numChannels=2;
    SLuint32 samplesPerSec=SL_SAMPLINGRATE_44_1;
    SLuint32 bitsPerSample=SL_PCMSAMPLEFORMAT_FIXED_16;
    SLuint32 containerSize=SL_PCMSAMPLEFORMAT_FIXED_16;
    // stereo: front-left | front-right (a mono source would use SL_SPEAKER_FRONT_CENTER)
    SLuint32 channelMask=SL_SPEAKER_FRONT_LEFT|SL_SPEAKER_FRONT_RIGHT;
    SLuint32 endianness=SL_BYTEORDER_LITTLEENDIAN;
    SLDataFormat_PCM format_pcm={SL_DATAFORMAT_PCM,(SLuint32)numChannels,samplesPerSec,bitsPerSample,containerSize,channelMask,endianness};
    SLDataSource audioSrc = {&loc_bufq, &format_pcm};
    // audio sink: the output mix
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};
    // create the player requesting buffer-queue / effect-send / volume interfaces
    const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND,SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
                                                3, ids, req);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("创建audioplayer失败");
        return JNI_FALSE;
    }
    result=(*bqPlayerObject)->Realize(bqPlayerObject,SL_BOOLEAN_FALSE);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("实例化audioplayer失败");
        return JNI_FALSE;
    }
    LOGD("---createBufferQueueAudioPlayer---");
    // get the play interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("获取play接口对象失败");
        return JNI_FALSE;
    }
    // get the buffer queue interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
                                             &bqPlayerBufferQueue);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("获取BUFFERQUEUE接口对象失败");
        return JNI_FALSE;
    }
    // register the refill callback on the buffer queue
    result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("注册回调失败"); // fix: the original logged the "play interface" message here
        return JNI_FALSE;
    }
    // get the effect send interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND,
                                             &bqPlayerEffectSend);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("获取EFFECTSEND接口对象失败");
        return JNI_FALSE;
    }
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject,SL_IID_VOLUME,&bqPlayerVolume);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("获取volume接口对象失败");
        return JNI_FALSE;
    }
    // start in the PLAYING state so enqueued buffers are consumed immediately
    result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
    if(result!=SL_RESULT_SUCCESS){
        LOGD("设置为可播放状态失败");
        return JNI_FALSE;
    }
    return JNI_TRUE;
}
/**
 * Open a raw PCM file (expected 44.1 kHz / 16-bit / stereo, matching the
 * buffer-queue player's format) and kick off playback by invoking the
 * buffer-queue callback once by hand.
 *
 * Fixes vs. the original:
 *  - the JNI UTF chars are released on every path (was leaked on fopen failure);
 *  - pcm_play_state is reset so a second playback works after the first one
 *    reached end-of-file;
 *  - the callback is only called when the buffer-queue player exists.
 */
JNIEXPORT void Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_playingPcm(JNIEnv* env,jclass clazz,jstring pcmFile){
    const char* pcm_path = (*env)->GetStringUTFChars(env, pcmFile, NULL);
    LOGE("pcm_path=%s", pcm_path);
    pcmFILE = fopen(pcm_path, "rb");
    (*env)->ReleaseStringUTFChars(env, pcmFile, pcm_path);
    if (!pcmFILE) {
        LOGD("无法打开pcm文件");
        return;
    }
    // reset the end-of-stream flag so replaying after EOF works
    pcm_play_state = 1;
    // prime the queue by calling the callback manually
    if (bqPlayerBufferQueue != NULL) {
        bqPlayerCallback(bqPlayerBufferQueue, NULL);
    }
}
/**
 * Tear down every OpenSL ES object that was created: players first, then
 * the output mix, finally the engine. Destroying an object invalidates all
 * interfaces obtained from it, so the interface pointers are nulled too.
 */
JNIEXPORT void Java_com_example_simpleTestFFmpeg_opensles_OpenSLESActivity_shutdown(JNIEnv* env,jclass clazz){
    // asset (file-descriptor) player
    if (fdPlayerObject != NULL) {
        (*fdPlayerObject)->Destroy(fdPlayerObject);
        fdPlayerObject = NULL;
        fdPlayerPlay = NULL;
        fdPlayerSeek = NULL;
        fdPlayerMuteSolo = NULL;
        fdPlayerVolume = NULL;
    }
    // URI player
    if (uriPlayerObject != NULL) {
        (*uriPlayerObject)->Destroy(uriPlayerObject);
        uriPlayerObject = NULL;
        uriPlayerPlay = NULL;
        uriPlayerSeek = NULL;
        uriPlayerMuteSolo = NULL;
        uriPlayerVolume = NULL;
    }
    // buffer-queue (PCM) player
    if (bqPlayerObject != NULL) {
        (*bqPlayerObject)->Destroy(bqPlayerObject);
        bqPlayerObject = NULL;
        bqPlayerPlay = NULL;
        bqPlayerBufferQueue = NULL;
        bqPlayerEffectSend = NULL;
        bqPlayerVolume = NULL;
    }
    // output mix
    if (outputMixObject != NULL) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
        outputMixEnvironmentalReverb = NULL;
    }
    // engine last -- everything else depends on it
    if (engineObject != NULL) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }
}
Activity代码[细节逻辑可能有问题,我是一个一个调试的所以通用一个created布尔类型]:
package com.example.simpleTestFFmpeg.opensles;
import android.content.res.AssetManager;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import com.example.simpleTestFFmpeg.R;
/**
 * Demo activity driving three native OpenSL ES players: an asset (MP3)
 * player, a URI streaming player, and a buffer-queue player for raw PCM.
 * NOTE(review): a single `created` flag is shared by all three players
 * (the author debugged them one at a time), so only the first player
 * created via a button can be toggled afterwards.
 */
public class OpenSLESActivity extends AppCompatActivity {
static {
System.loadLibrary("opensl_pcm");
}
private AssetManager assetManager;
// true once any of the three native players has been created (shared flag)
private boolean created = false;
private boolean isPlayingAsset = false;
private boolean isPlayingUri = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_open_sles);
assetManager = getAssets();
createEngine();
}
/**
 * Called when the activity leaves the foreground; pause all audio.
 */
@Override
protected void onPause() {
// turn off all audio
isPlayingAsset = false;
setPlayingAssetAudioPlayer(false);
isPlayingUri = false;
setPlayingUriAudioPlayer(false);
super.onPause();
}
/**
 * Called when the activity is about to be destroyed; release native resources.
 */
@Override
protected void onDestroy() {
shutdown();
super.onDestroy();
}
// create the OpenSL ES engine and output mix
private native boolean createEngine();
//-----------Asset
private native boolean createAssetAudioPlayer(AssetManager assetManager, String fileName);
private native void setPlayingAssetAudioPlayer(boolean isPlaying);
//-----------Asset
//-----------uri
private native boolean createUriAudioPlayer(String uri);
private native void setPlayingUriAudioPlayer(boolean isPlaying);
//-----------uri
//-----------BufferQueuePCM
private native boolean createBufferQueueAudioPlayer();
private native void playingPcm(String pcmPath);
//-----------BufferQueuePCM
// release all native resources
private native void shutdown();
// button handler: first tap creates the asset player, later taps toggle play/pause
public void playMP3(View view) {
if (!created) {
created = createAssetAudioPlayer(assetManager, "lky_bhs.mp3");
} else {
isPlayingAsset = !isPlayingAsset;
setPlayingAssetAudioPlayer(isPlayingAsset);
}
}
//http://www.freesound.org/data/previews/18/18765_18799-lq.mp3
// button handler: first tap creates the URI player, later taps toggle play/pause
public void playUri(View view) {
Log.d("zbv", "playUri=" + created);
if (!created) {
created = createUriAudioPlayer("http://www.freesound.org/data/previews/18/18765_18799-lq.mp3");
} else {
isPlayingUri = !isPlayingUri;
setPlayingUriAudioPlayer(isPlayingUri);
}
}
// button handler: creates the buffer-queue player if needed, then streams a
// PCM file from the public Music directory
public void playPCM(View view) {
Log.d("zbv", "pcm created=" + created);
if (!created) {
created = createBufferQueueAudioPlayer();
}
Log.d("zbv", "pcm created=" + created);
if (created) {
playingPcm(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC)
.getAbsolutePath() + "/lky_bhs.pcm");
}
}
}
附上Android.mk[用到的基本都是NDK内部链接库]
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# module name: produces libopensl_pcm.so, loaded via System.loadLibrary("opensl_pcm")
LOCAL_MODULE := opensl_pcm
LOCAL_SRC_FILES := opensl_pcm.c
# for native audio
LOCAL_LDLIBS += -lOpenSLES
# for logging
LOCAL_LDLIBS += -llog
# for native asset manager
LOCAL_LDLIBS += -landroid
include $(BUILD_SHARED_LIBRARY)
学习参考资料:opensl es的api文档
最后说下自己学习后的心得:这些系统性的api都是有套路和思路可循的,要学会查api、搞懂每个函数的含义以及需要的参数,基本上就能正常使用。但是万丈高楼平地起,基础的知识还是很有必要的,c语言的用法、音频的原理等都需要进一步去了解,才可以应用到更大的场景、才能灵活运用,继续学习中。。。