cygwin编译见上一篇文章.
在 ffmpeg/arm 目录中添加 Android.mk，主要目的是把编译好的 FFmpeg 动态库发布到 libs 目录下。
LOCAL_PATH:= $(call my-dir)

# Each stanza below registers one prebuilt FFmpeg shared library with the
# NDK build system (ndk-build copies PREBUILT_SHARED_LIBRARY modules into
# libs/<abi>/ at build time). LOCAL_EXPORT_C_INCLUDES makes the FFmpeg
# headers visible to any module that lists these in LOCAL_SHARED_LIBRARIES.

include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec
LOCAL_SRC_FILES:= lib/libavcodec-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat
LOCAL_SRC_FILES:= lib/libavformat-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale
LOCAL_SRC_FILES:= lib/libswscale-2.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil
LOCAL_SRC_FILES:= lib/libavutil-52.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE:= libavfilter
LOCAL_SRC_FILES:= lib/libavfilter-4.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)

# FIX: module name was misspelled "libwsresample"; it must match the
# library actually shipped (libswresample-0.so), otherwise modules that
# reference "libswresample" in LOCAL_SHARED_LIBRARIES fail to resolve.
include $(CLEAR_VARS)
LOCAL_MODULE:= libswresample
LOCAL_SRC_FILES:= lib/libswresample-0.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
增加Media.h
#pragma once
// FFmpeg headers use the C99 INT64_C/UINT64_C macros; in C++ those are only
// provided by <stdint.h> when __STDC_CONSTANT_MACROS is defined BEFORE the
// first (possibly transitive) inclusion of <stdint.h>.
// FIX: the define originally came after <jni.h>/<pthread.h>, which may
// already pull in <stdint.h>, making it too late to have any effect.
#define __STDC_CONSTANT_MACROS
#include <jni.h>
#include <android/native_window_jni.h>
#include "utils/Lock.h"
#include <pthread.h>
// Fallback for toolchains whose <stdint.h> still did not define the macros.
#ifndef INT64_C
#define INT64_C(c) (c ## LL)
#define UINT64_C(c) (c ## ULL)
#endif
// FFmpeg is a C library; force C linkage for its headers.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/dict.h>
#include <libavutil/frame.h>
#include <libavutil/mem.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>
#include <libavutil/time.h>
#include <libavutil/opt.h>
#include <libswresample/swresample.h>
}
// Media: JNI-side media player. Owns the FFmpeg demux/decode contexts and an
// ANativeWindow render target; decoding runs on a dedicated pthread started
// via decodeAndRenderAdpt. NOTE(review): method semantics below are inferred
// from names/partially visible .cpp — confirm against the full implementation.
class Media
{
public:
Media();
~Media();
// Attach (pSurface != 0) or detach (pSurface == 0) the render Surface and
// (re)create the RGBA conversion buffer/scaler for pWidth x pHeight.
void setSurface(JNIEnv *pEnv, jobject pSurface,int pWidth,int pHeight);
// Open the media file at `path`; presumably must precede initCodec — TODO confirm.
bool initPath(const char * path);
bool initCodec(int width,int height);
// Source video dimensions — presumably valid only after initCodec; verify.
int getResWidth();
int getResHeight();
void play();
void pause();
void stop();
bool isPlaying();
// Decode one compressed video packet from pBuffer and render it.
void decodeAndRenderPic(void *pBuffer,int dwBufsize);
// Decode one compressed audio packet from pBuffer and queue it for playback.
void decodeAudioAndPlay(void *pBuffer,int dwBufsize);
private:
// pthread entry trampoline: `params` is the Media instance.
static void* decodeAndRenderAdpt(void *params);
void decodeAndRender();
private:
bool bInit;                    // set true once initialization completed
ANativeWindow* window;         // render target from the Java Surface
char *videoFileName;
AVFormatContext *formatCtx;    // demuxer context (closed in destructor)
int videoStream;               // stream indices; -1 = not found
int audioStream;
AVCodecContext *codecCtx;      // video decoder
AVCodecContext *codecCtxAudio; // audio decoder
AVFrame *decodedFrame;         // raw decoded (YUV) frame
AVFrame *frameRGBA ;           // frame wrapping `buffer` for display
jobject bitmap;
void* buffer;                  // malloc'd RGBA pixel buffer (setSurface)
struct SwsContext *sws_ctx;    // YUV -> RGBA scaler
struct SwrContext *swr_ctx;    // audio resampler
int width;                     // target (surface) dimensions
int height;
bool _stop;                    // playback stop flag
pthread_t decodeThread;
Mutex mutexSurface;            // blocks decode until a surface is attached
Mutex lockWindow;              // guards `window` create/release
};
增加Media.cpp
#include "Media.h"
#include "Audio.h"
//ffmpeg 需要先定义 __STDC_CONSTANT_MACROS 才能通过 c++ 编译
#define __STDC_CONSTANT_MACROS
#include <android/native_window_jni.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include "utils/Log.h"
#include "cu.h"
#include <unistd.h>
#include <sys/syscall.h>
#include <sys/linux-syscalls.h>
#define SYS_gettid __NR_gettid
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/dict.h>
#include <libavutil/frame.h>
#include <libavutil/mem.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>
}
#define RGB_SIZE 4
//AV_PIX_FMT_RGBA,AV_PIX_FMT_RGB24
#define AV_FMT AV_PIX_FMT_RGBA
// Constructor: put every member into a known default state.
// FIX: the member-init list is reordered to match the declaration order in
// Media.h — initializers always execute in declaration order, so a mismatched
// list is misleading and triggers -Wreorder. Also initializes `bitmap`,
// which the original left indeterminate, and folds bInit into the list.
Media::Media()
: bInit(false)
, window(NULL)
, videoFileName(NULL)
, formatCtx(NULL)
, videoStream(-1)
, audioStream(-1)
, codecCtx(NULL)
, codecCtxAudio(NULL)
, decodedFrame(NULL)
, frameRGBA(NULL)
, bitmap(NULL)
, buffer(NULL)
, sws_ctx(NULL)
, swr_ctx(NULL)
, width(0)
, height(0)
, _stop(true)
, decodeThread(NULL)
, mutexSurface(true)
, lockWindow(false)
{
}
// Destructor: stop playback, join the decode thread, then release every
// FFmpeg/native resource this object owns.
Media::~Media(){
stop();
// Wait for the decode thread so it cannot touch freed state below.
if(NULL!=decodeThread)
{
pthread_join(decodeThread, NULL);
}
if(NULL!=window)
{
ANativeWindow_release(window);
window=NULL;
}
// Free the RGB image
// NOTE(review): av_free matches avcodec_alloc_frame-era allocation; if the
// frames are created with av_frame_alloc, av_frame_free is required — confirm.
if(NULL!=frameRGBA)
{
av_free(frameRGBA);
frameRGBA=NULL;
}
// Free the YUV frame
if(NULL!=decodedFrame)
{
av_free(decodedFrame);
decodedFrame=NULL;
}
// FIX: release the scaler/resampler contexts and the RGBA pixel buffer that
// setSurface() allocated — the original destructor leaked all three.
if(NULL!=sws_ctx)
{
sws_freeContext(sws_ctx);
sws_ctx=NULL;
}
if(NULL!=swr_ctx)
{
swr_free(&swr_ctx); // sets swr_ctx to NULL itself
}
if(NULL!=buffer)
{
free(buffer);
buffer=NULL;
}
// Close the codecs. FIX: the audio codec context was never closed.
if(NULL!=codecCtx)
{
avcodec_close(codecCtx);
codecCtx=NULL;
}
if(NULL!=codecCtxAudio)
{
avcodec_close(codecCtxAudio);
codecCtxAudio=NULL;
}
// Close the video file (avformat_close_input also NULLs the pointer).
if(NULL!=formatCtx)
{
avformat_close_input(&formatCtx);
formatCtx=NULL;
}
}
void Media::setSurface(JNIEnv *pEnv, jobject pSurface,int pWidth,int pHeight)
{
LOGD("Media::setSurface start, %d,%d,%d", (int)pSurface , pWidth, pHeight);
if (0 != pSurface) {
if(pWidth <=0 || pHeight<=0)
{
LOGD("Media::setSurface width or height is zero !!! %d,%d", pWidth, pHeight);
return;
}
if(NULL==window)
{
synchronized(lockWindow)
{
// get the native window reference
window = ANativeWindow_fromSurface(pEnv, pSurface);
// set format and size of window buffer WINDOW_FORMAT_RGBA_8888
ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBA_8888);
}
}
} else {
stop();
if(NULL!=window)
{
// release the native window
synchronized(lockWindow)
{
ANativeWindow_release(window);
window=NULL;
}
}
return;
}
//reset width and height
width = pWidth;
height = pHeight;
if(NULL != buffer)
{
free(buffer);
buffer=NULL;
}
buffer = malloc(pWidth * pHeight * RGB_SIZE);
if(NULL == buffer)
{
LOGE("Media::setSurface Cannot malloc buffer size : %d!", pWidth * pHeight * RGB_SIZE);
return;
}
//get the scaling context
sws_ctx = sws_getContext (
codecCtx->width,
codecCtx->height,
codecCtx->pix_fmt,
pWidth,
pHeight,
AV_FMT,
SWS_FAST_BILINEAR,//SWS_BILINEAR,
NULL,
NULL,
NULL
);
// Assign appropriate parts of bitmap to image planes in pFrameRGBA
// Note that pFrameRGBA is an AVFrame, but AVFrame is a superset
// of AVPicture
avpicture_fill((AVPicture *)frameRGBA, (uint8_t *)buffer, AV_FMT,
pWidth, pHeight);
LOGD("Media::setSurface window:%d , mutexInit.isLocked: %d !", (int)window,(int) mutexSurface.isLocked());
if(NULL!=window && mutexSurface.isLocked())
{
LOGD("Media::setSurface unlock surface!");
mutexSurface.unlock();
}
LOGD(&