Rendering Text with OpenGL in Android JNI (Part 1)


The text-rendering chapter of LearnOpenGL covers the topic in great detail, but I could not find anything that specifically explains how to use OpenGL to render text in an Android JNI environment. A few points matter most here: first, how to pass the Surface down to the C++ layer; second, how to load the FreeType library; and third, how to write the text-rendering shader on Android.

Create a Native C++ project

The project name can be anything, but Kotlin is the better choice of language. Build and run it first to make sure the template works.

Modify activity_main.xml

Delete the template's label component (the TextView) and add a SurfaceView as follows. With 0dp for both dimensions plus the four constraints, the SurfaceView stretches to fill the parent ConstraintLayout.

<SurfaceView
   android:id="@+id/surfaceView"
   android:layout_width="0dp"
   android:layout_height="0dp"
   app:layout_constraintBottom_toBottomOf="parent"
   app:layout_constraintEnd_toEndOf="parent"
   app:layout_constraintStart_toStartOf="parent"
   app:layout_constraintTop_toTopOf="parent" />

Modify MainActivity.kt to get the SurfaceView

glSurfaceView = findViewById<SurfaceView>(R.id.surfaceView)
glSurfaceView.holder.addCallback(object : SurfaceHolder.Callback {
    override fun surfaceCreated(holder: SurfaceHolder) {
        // Hand the Surface down to the C++ layer as soon as it exists.
        nativeSetView(holder.surface)
    }
    override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
    }
    override fun surfaceDestroyed(holder: SurfaceHolder) {
    }
})

Add the native function nativeSetView

After you add it, Android Studio renders the function name in red; hover over it and the IDE offers to generate the corresponding C++ implementation. That is the easiest way to create the stub.

 external fun nativeSetView(surface: Surface)
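
One thing the snippet above leaves implicit is loading the native library; without it the external call fails at runtime with an UnsatisfiedLinkError. A minimal sketch, assuming the template's default library name "myapplication" (use whatever name add_library() declares in your CMakeLists.txt):

import android.view.Surface

class MainActivity : AppCompatActivity() {
    // Implemented in C++; the IDE quick-fix generates the matching stub.
    external fun nativeSetView(surface: Surface)

    companion object {
        init {
            // Must match add_library(...) in CMakeLists.txt;
            // "myapplication" is the template default, an assumption here.
            System.loadLibrary("myapplication")
        }
    }
}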

Get the ANativeWindow in the generated function

A file-scope global variable is needed here so that the other native functions can reach the window.

g_window = ANativeWindow_fromSurface(env, surface);
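
Putting the native side together, here is a minimal sketch, assuming the package name com.example.myapplication (the JNI function name must match your real package, or the call will not resolve):

#include <jni.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

// File-scope global so the EGL code below can reach the window.
static ANativeWindow *g_window = nullptr;

extern "C" JNIEXPORT void JNICALL
Java_com_example_myapplication_MainActivity_nativeSetView(
        JNIEnv *env, jobject /* this */, jobject surface) {
    // Convert the Java Surface into an ANativeWindow usable from C++.
    g_window = ANativeWindow_fromSurface(env, surface);
}

Remember to link the android, EGL, GLESv2, and log system libraries via target_link_libraries in CMakeLists.txt, or the calls below will not link.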

Render a solid color

Continue with the following code; everything before glClearColor is standard boilerplate that prepares the EGL environment. It can simply run at the end of nativeSetView once g_window exists. Note that printf output never reaches logcat on Android, so the snippet logs through __android_log_print instead.

#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <android/log.h>

#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "TextRender", __VA_ARGS__)

EGLDisplay mDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (mDisplay == EGL_NO_DISPLAY) {
    LOGE("eglGetDisplay failed");
}
if (EGL_TRUE != eglInitialize(mDisplay, 0, 0)) {
    LOGE("eglInitialize failed");
}

EGLConfig eglConfig;
EGLint configNum;
EGLint configSpec[] = {
        EGL_RED_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_BLUE_SIZE, 8,
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, // match the ES2 context requested below
        EGL_NONE
};

if (EGL_TRUE != eglChooseConfig(mDisplay, configSpec, &eglConfig, 1, &configNum)) {
    LOGE("eglChooseConfig failed");
}

// g_window is the ANativeWindow saved in nativeSetView above.
EGLSurface mWinSurface = eglCreateWindowSurface(mDisplay, eglConfig, g_window, 0);
if (mWinSurface == EGL_NO_SURFACE) {
    LOGE("eglCreateWindowSurface failed");
}

const EGLint ctxAttr[] = {
        EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
};
EGLContext context = eglCreateContext(mDisplay, eglConfig, EGL_NO_CONTEXT, ctxAttr);
if (context == EGL_NO_CONTEXT) {
    LOGE("eglCreateContext failed");
}
if (EGL_TRUE != eglMakeCurrent(mDisplay, mWinSurface, mWinSurface, context)) {
    LOGE("eglMakeCurrent failed");
}

// Draw: clearing to green and swapping is enough for a solid color;
// no shader program is bound yet, so there is nothing else to draw.
glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
eglSwapBuffers(mDisplay, mWinSurface);
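
If the SurfaceView comes up solid green, the whole Surface → ANativeWindow → EGL chain is working. The remaining points from the introduction, loading FreeType and writing the text shader, build on top of this setup.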
