Android JNI: Rendering Text with OpenGL (Part 3)


This article renders a single character to the screen with OpenGL. It is based on the tutorial at https://learnopengl-cn.github.io/06%20In%20Practice/02%20Text%20Rendering/

How OpenGL text rendering works

FreeType's job is to convert a glyph from a font file into a bitmap, and it also computes the glyph's size and metrics. On the OpenGL side, all that is needed is to upload this bitmap to the GPU as a texture; at render time, the bitmap is combined with a color.
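For context, here is a minimal sketch of the FreeType setup that the code below assumes (the g_face handle comes from it; the font path and the initFreeType name are placeholders of mine):

    #include <ft2build.h>
    #include FT_FREETYPE_H

    static FT_Library g_library;
    static FT_Face    g_face;

    // Initialize the library, then load a font file into a face.
    // Returns true on success; the path is a placeholder.
    static bool initFreeType(const char *fontPath) {
        if (FT_Init_FreeType(&g_library))
            return false;
        if (FT_New_Face(g_library, fontPath, 0, &g_face))
            return false;
        return true;
    }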

The text-rendering shaders

Creating the shaders, creating the program, and so on will not be repeated here; refer to the code directly. Let's look at the vertex shader and the fragment shader. The vertex shader is simple: aPosition is the position and size of the quad to draw, and aTextCoord is the texture coordinate, which is flipped vertically and passed on to the fragment shader. The fragment shader reads the alpha channel from the texture and multiplies it with a solid color; here the color is red.

#define GET_STR(x) #x
static const char *textVertexShader = GET_STR(
        attribute vec4 aPosition;
        attribute vec2 aTextCoord; // input texture coordinate, filled from a vertex attribute array
        varying vec2 vTextCoord;
        void main() {
            vTextCoord = vec2(aTextCoord.x, 1.0 - aTextCoord.y); // flip vertically
            gl_Position = aPosition;
        }
);
static const char *textFragmentShader = GET_STR(
        precision mediump float; // GLSL ES requires a default float precision in fragment shaders
        varying vec2 vTextCoord;
        uniform sampler2D yTexture;
        void main() {
            // Use the single-channel glyph bitmap as alpha, then tint it red.
            vec4 sampled = vec4(1.0, 1.0, 1.0, texture2D(yTexture, vTextCoord).a);
            gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0) * sampled;
        }
);
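Since shader and program creation is skipped above, here is one common way to compile and link the two sources (loadShader and createProgram are helper names of mine, with error checks omitted for brevity):

    static GLuint loadShader(GLenum type, const char *src) {
        GLuint shader = glCreateShader(type);
        glShaderSource(shader, 1, &src, nullptr);
        glCompileShader(shader);   // query GL_COMPILE_STATUS in real code
        return shader;
    }

    static GLuint createProgram(const char *vs, const char *fs) {
        GLuint program = glCreateProgram();
        glAttachShader(program, loadShader(GL_VERTEX_SHADER, vs));
        glAttachShader(program, loadShader(GL_FRAGMENT_SHADER, fs));
        glLinkProgram(program);    // query GL_LINK_STATUS in real code
        return program;
    }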

Loading a glyph and generating the texture

First, be sure to set the font's pixel size; then load the glyph image for the first character of the string. When creating the texture, the format must be GL_ALPHA: the GL_RED format used in the original tutorial is only available from OpenGL ES 3.0 onward, which is why it "kept causing problems" here on ES 2.0. The texture parameters that follow are routine boilerplate; just write them as shown.
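One detail worth adding before the upload: FreeType glyph bitmaps are tightly packed at one byte per pixel, while OpenGL's default row alignment for uploads is 4 bytes, so glyphs whose width is not a multiple of 4 will come out skewed unless the unpack alignment is lowered first:

    // Glyph bitmaps have no row padding; disable the default 4-byte alignment.
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);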

    // 48 px glyph height; passing 0 for the width lets FreeType derive it.
    FT_Set_Pixel_Sizes(g_face, 0, 48);
    const char *chinese_str = "aaa";
    // Render the first character of the string into g_face->glyph->bitmap.
    FT_Load_Char(g_face, chinese_str[0], FT_LOAD_RENDER);

    // Upload the 8-bit glyph bitmap as a single-channel GL_ALPHA texture.
    glTexImage2D(
        GL_TEXTURE_2D,
        0,
        GL_ALPHA,
        g_face->glyph->bitmap.width,
        g_face->glyph->bitmap.rows,
        0,
        GL_ALPHA,
        GL_UNSIGNED_BYTE,
        g_face->glyph->bitmap.buffer
    );

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
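The quad that aPosition receives is built from the glyph's metrics. A sketch under assumed values (the viewport size, pen position, and the pixel-to-NDC factors sx/sy are placeholders; the real values depend on your surface):

    // Convert glyph metrics to NDC; 1080x1920 is an assumed viewport.
    float sx = 2.0f / 1080.0f, sy = 2.0f / 1920.0f;
    float x = -0.5f, y = 0.0f;  // assumed pen position on the baseline

    float xpos = x + g_face->glyph->bitmap_left * sx;
    float ypos = y - (g_face->glyph->bitmap.rows - g_face->glyph->bitmap_top) * sy;
    float w = g_face->glyph->bitmap.width * sx;
    float h = g_face->glyph->bitmap.rows  * sy;

    // Quad as a triangle strip: bottom-left, bottom-right, top-left, top-right.
    GLfloat vertices[] = {
        xpos,     ypos,
        xpos + w, ypos,
        xpos,     ypos + h,
        xpos + w, ypos + h,
    };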

Note

These two lines must be added; otherwise you will only get a solid red square. Without blending, the alpha the fragment shader outputs is ignored and every pixel of the quad stays opaque red; with blending enabled, the glyph's alpha controls transparency.

glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
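With blending on, the per-frame draw boils down to binding the program, the texture, and the attribute arrays. A sketch (the attribute and uniform names match the shaders above; vertices comes from the metrics sketch earlier, and texCoords is what the vertex shader flips):

    GLfloat texCoords[] = { 0, 0,  1, 0,  0, 1,  1, 1 };

    glUseProgram(program);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, texture);
    glUniform1i(glGetUniformLocation(program, "yTexture"), 0);

    GLint aPosition  = glGetAttribLocation(program, "aPosition");
    GLint aTextCoord = glGetAttribLocation(program, "aTextCoord");
    glEnableVertexAttribArray(aPosition);
    glVertexAttribPointer(aPosition, 2, GL_FLOAT, GL_FALSE, 0, vertices);
    glEnableVertexAttribArray(aTextCoord);
    glVertexAttribPointer(aTextCoord, 2, GL_FLOAT, GL_FALSE, 0, texCoords);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);  // one quad per glyph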

The full source code is available on GitHub:
https://github.com/Niap/OpenglFreetypeDemo.git
