Using ARCore on Android: Adding Anchors and a Drawing (Doodle) Feature

  • Preliminaries

Official documentation:

https://developers.google.cn/ar/develop/java/quickstart?hl=zh-cn

Official demo:

https://github.com/google-ar/arcore-android-sdk/tree/master/samples/hello_ar_java

Supported device list (it is long; note that most Huawei phones are restricted):

https://developers.google.cn/ar/devices?hl=zh-cn

Install the Google Play Services for AR (ARCore) package on the phone.

Google Play: https://play.google.com/store/apps/details?id=com.google.ar.core

In mainland China it can also be found via Baidu and the major app stores; pick the latest version and install it on the phone.
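You can also check device support at runtime with ArCoreApk before showing any AR UI; a minimal sketch (here `context` stands for your Activity or application Context):

// Check whether the device supports ARCore and whether "Google Play Services for AR" is installed.
ArCoreApk.Availability availability = ArCoreApk.getInstance().checkAvailability(context);
if (availability.isTransient()) {
    // Result is not ready yet; re-check after a short delay.
}
if (availability.isSupported()) {
    // Device supports ARCore; requestInstall() in onResume handles installation/updates.
}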

  • Development tools

Android Studio

Reading and running the demo and the official API documentation carefully will help a lot with understanding the code.

In addition, the ARCore demo involves a lot of GLES20/GLES30 (OpenGL) material, which is fairly deep and hard to follow without prior graphics background. This article only walks through the surface-level processing logic, as a starting point.

  • Integration
  1. Gradle dependencies:

dependencies {

    // ARCore (Google Play Services for AR) library.

    implementation 'com.google.ar:core:1.41.0'

    // Obj - a simple Wavefront OBJ file loader

    implementation 'de.javagl:obj:0.4.0'

}

  2. AndroidManifest.xml configuration:

  <uses-permission android:name="android.permission.CAMERA"/>

  <!-- Limits app visibility in the Google Play Store to ARCore supported devices

       (https://developers.google.com/ar/devices). -->

  <uses-feature android:name="android.hardware.camera.ar" android:required="true"/>

  <uses-feature android:glEsVersion="0x00020000" android:required="true" />

Add inside the <application> node:

  <!-- Indicates whether "Google Play Services for AR" (ARCore) is "required" or "optional". -->

<meta-data android:name="com.google.ar.core" android:value="required" />

  3. Copy resources

Copy the model and shader assets from the demo into the assets directory:

  4. Copy the key helper code

Mainly the classes under the demo's common package:

  5. Code wiring and layout

The layout XML needs a GLSurfaceView to display the camera video stream:

<android.opengl.GLSurfaceView

        android:visibility="visible"

        android:id="@+id/surfaceview"

        android:layout_width="match_parent"

        android:layout_height="match_parent"/>

In the Activity or Fragment (only excerpts are shown here; see the demo for the full code):

Initialize the tap listener: tapHelper = new TapHelper(/* context= */ this);

(This is a place where you can extend with custom functionality and handle extra business logic.)

// Lets the Session output data at the resolution matching the phone's portrait/landscape orientation
Initialize display rotation handling: displayRotationHelper = new DisplayRotationHelper(/* context= */ this);

// Attach the tap listener
surfaceView.setOnTouchListener(tapHelper);

// Initialize the renderer.
render = new SampleRender(surfaceView, this, context.getAssets());
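Putting the pieces above together, a condensed onCreate() sketch in the style of the demo Activity (R.layout.activity_main is a placeholder for your own layout; the field names match hello_ar_java):

@Override
protected void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  setContentView(R.layout.activity_main);            // layout containing the GLSurfaceView
  surfaceView = findViewById(R.id.surfaceview);

  // Lets the Session output data matching portrait/landscape orientation.
  displayRotationHelper = new DisplayRotationHelper(/* context= */ this);

  // Tap listener feeding handleTap() in onDrawFrame.
  tapHelper = new TapHelper(/* context= */ this);
  surfaceView.setOnTouchListener(tapHelper);

  // SampleRender drives the onSurfaceCreated()/onDrawFrame() callbacks on this class.
  render = new SampleRender(surfaceView, this, getAssets());
}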

In onResume:

Check whether the ARCore service (Google Play Services for AR) is installed:

ArCoreApk.getInstance().requestInstall(context, !installRequested)

Check whether the camera permission has been granted:

CameraPermissionHelper.hasCameraPermission(context)

Initialize the Session:

Set the light estimation mode, the depth switch, the instant placement mode, and the auto-focus mode (supported only on some phones):

configureSession();

session.resume();
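A condensed onResume() and configureSession() sketch following the demo's flow (exception handling is trimmed to the essentials; field names match hello_ar_java):

@Override
protected void onResume() {
  super.onResume();
  if (session == null) {
    try {
      // 1. Make sure "Google Play Services for AR" is installed.
      if (ArCoreApk.getInstance().requestInstall(this, !installRequested)
          == ArCoreApk.InstallStatus.INSTALL_REQUESTED) {
        installRequested = true;
        return;
      }
      // 2. Make sure the CAMERA permission has been granted.
      if (!CameraPermissionHelper.hasCameraPermission(this)) {
        CameraPermissionHelper.requestCameraPermission(this);
        return;
      }
      session = new Session(/* context= */ this);
    } catch (Exception e) {
      Log.e(TAG, "Failed to create AR session", e);
      return;
    }
  }
  configureSession();
  try {
    session.resume();
  } catch (CameraNotAvailableException e) {
    session = null;
    return;
  }
  surfaceView.onResume();
  displayRotationHelper.onResume();
}

private void configureSession() {
  Config config = session.getConfig();
  config.setLightEstimationMode(Config.LightEstimationMode.ENVIRONMENTAL_HDR);
  // Enable depth only on devices that support it.
  if (session.isDepthModeSupported(Config.DepthMode.AUTOMATIC)) {
    config.setDepthMode(Config.DepthMode.AUTOMATIC);
  } else {
    config.setDepthMode(Config.DepthMode.DISABLED);
  }
  config.setInstantPlacementMode(Config.InstantPlacementMode.LOCAL_Y_UP);
  config.setFocusMode(Config.FocusMode.AUTO);   // auto-focus; not supported on every phone
  session.configure(config);
}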

In onSurfaceCreated:

Prepare the rendering objects. This involves reading the shader and 3D model files.

It touches on the OpenGL concepts of shader/mesh/render/texture, which are fairly involved; read up on the relevant topics if they are new to you.

 try {

//Set up the plane and background renderers.

      planeRenderer = new PlaneRenderer(render);

      backgroundRenderer = new BackgroundRenderer(render);

      virtualSceneFramebuffer = new Framebuffer(render, /* width= */ 1, /* height= */ 1);

      cubemapFilter =

          new SpecularCubemapFilter(

              render, CUBEMAP_RESOLUTION, CUBEMAP_NUMBER_OF_IMPORTANCE_SAMPLES);

      // Load DFG lookup table for environmental lighting

      dfgTexture =

          new Texture(

              render,

              Texture.Target.TEXTURE_2D,

              Texture.WrapMode.CLAMP_TO_EDGE,

              /* useMipmaps= */ false);

      // The dfg.raw file is a raw half-float texture with two channels.

      final int dfgResolution = 64;

      final int dfgChannels = 2;

      final int halfFloatSize = 2;

      ByteBuffer buffer =

          ByteBuffer.allocateDirect(dfgResolution * dfgResolution * dfgChannels * halfFloatSize);

      try (InputStream is = getAssets().open("models/dfg.raw")) {

        is.read(buffer.array());

      }

      // SampleRender abstraction leaks here.

      GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, dfgTexture.getTextureId());

      GLError.maybeThrowGLException("Failed to bind DFG texture", "glBindTexture");

      GLES30.glTexImage2D(

          GLES30.GL_TEXTURE_2D,

          /* level= */ 0,

          GLES30.GL_RG16F,

          /* width= */ dfgResolution,

          /* height= */ dfgResolution,

          /* border= */ 0,

          GLES30.GL_RG,

          GLES30.GL_HALF_FLOAT,

          buffer);

      GLError.maybeThrowGLException("Failed to populate DFG texture", "glTexImage2D");

      // Point cloud

      pointCloudShader =

          Shader.createFromAssets(

                  render,

                  "shaders/point_cloud.vert",

                  "shaders/point_cloud.frag",

                  /* defines= */ null)

              .setVec4(

                  "u_Color", new float[] {31.0f / 255.0f, 188.0f / 255.0f, 210.0f / 255.0f, 1.0f})

              .setFloat("u_PointSize", 5.0f);

      // four entries per vertex: X, Y, Z, confidence

      pointCloudVertexBuffer =

          new VertexBuffer(render, /* numberOfEntriesPerVertex= */ 4, /* entries= */ null);

      final VertexBuffer[] pointCloudVertexBuffers = {pointCloudVertexBuffer};

      pointCloudMesh =

          new Mesh(

              render, Mesh.PrimitiveMode.POINTS, /* indexBuffer= */ null, pointCloudVertexBuffers);

Depending on your business needs, you can preload several anchor styles here (for example, 3D anchor models for an arrow, a landmark marker, and a circle) by creating a separate Mesh and Shader for each, and then draw the matching model per anchor type in onDrawFrame. Drawback: loading too many models causes stutter, so keep the model textures and meshes small.
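A minimal sketch of that idea, in the style of the surrounding demo code; the asset file names and field names are assumptions you would supply yourself:

      // (Sketch) Extra anchor styles preloaded next to the pawn model; asset names are placeholders.
      arrowMesh = Mesh.createFromAsset(render, "models/arrow.obj");
      arrowAlbedoTexture =
          Texture.createFromAsset(
              render, "models/arrow_albedo.png", Texture.WrapMode.CLAMP_TO_EDGE, Texture.ColorFormat.SRGB);
      circleMesh = Mesh.createFromAsset(render, "models/circle.obj");
      // In onDrawFrame, choose the mesh/texture by the anchor's type before calling render.draw(...).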

      // Virtual object to render (ARCore pawn)

      virtualObjectAlbedoTexture =

          Texture.createFromAsset(

              render,

              "models/pawn_albedo.png",

              Texture.WrapMode.CLAMP_TO_EDGE,

              Texture.ColorFormat.SRGB);

      virtualObjectAlbedoInstantPlacementTexture =

          Texture.createFromAsset(

              render,

              "models/pawn_albedo_instant_placement.png",

              Texture.WrapMode.CLAMP_TO_EDGE,

              Texture.ColorFormat.SRGB);

      Texture virtualObjectPbrTexture =

          Texture.createFromAsset(

              render,

              "models/pawn_roughness_metallic_ao.png",

              Texture.WrapMode.CLAMP_TO_EDGE,

              Texture.ColorFormat.LINEAR);

      virtualObjectMesh = Mesh.createFromAsset(render, "models/pawn.obj");

      virtualObjectShader =

          Shader.createFromAssets(

                  render,

                  "shaders/environmental_hdr.vert",

                  "shaders/environmental_hdr.frag",

                  /* defines= */ new HashMap<String, String>() {

                    {

                      put(

                          "NUMBER_OF_MIPMAP_LEVELS",

                          Integer.toString(cubemapFilter.getNumberOfMipmapLevels()));

                    }

                  })

              .setTexture("u_AlbedoTexture", virtualObjectAlbedoTexture)

              .setTexture("u_RoughnessMetallicAmbientOcclusionTexture", virtualObjectPbrTexture)

              .setTexture("u_Cubemap", cubemapFilter.getFilteredCubemapTexture())

              .setTexture("u_DfgTexture", dfgTexture);

    } catch (IOException e) {

      Log.e(TAG, "Failed to read a required asset file", e);

      messageSnackbarHelper.showError(this, "Failed to read a required asset file: " + e);

    }

In onDrawFrame:

This is where per-frame rendering and pre-processing happen. The handleTap() method inside it converts tap events into anchors; it is a natural extension point for business logic, for example capping the number of anchors or distinguishing who produced a tap (your own side or not). A handleTap() sketch is given after the listing below.

if (session == null) {

      return;

    }

    // Texture names should only be set once on a GL thread unless they change. This is done during

    // onDrawFrame rather than onSurfaceCreated since the session is not guaranteed to have been

    // initialized during the execution of onSurfaceCreated.

    if (!hasSetTextureNames) {

      session.setCameraTextureNames(

          new int[] {backgroundRenderer.getCameraColorTexture().getTextureId()});

      hasSetTextureNames = true;

    }

    // -- Update per-frame state

    // Notify ARCore session that the view size changed so that the perspective matrix and

    // the video background can be properly adjusted.

    displayRotationHelper.updateSessionIfNeeded(session);

    // Obtain the current frame from the AR Session. When the configuration is set to

    // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the

    // camera framerate.

    Frame frame;

    try {

      frame = session.update();

    } catch (CameraNotAvailableException e) {

      Log.e(TAG, "Camera not available during onDrawFrame", e);

      messageSnackbarHelper.showError(this, "Camera not available. Try restarting the app.");

      return;

    }

    Camera camera = frame.getCamera();

    // Update BackgroundRenderer state to match the depth settings.

    try {

      backgroundRenderer.setUseDepthVisualization(

          render, depthSettings.depthColorVisualizationEnabled());

      backgroundRenderer.setUseOcclusion(render, depthSettings.useDepthForOcclusion());

    } catch (IOException e) {

      Log.e(TAG, "Failed to read a required asset file", e);

      messageSnackbarHelper.showError(this, "Failed to read a required asset file: " + e);

      return;

    }

    // BackgroundRenderer.updateDisplayGeometry must be called every frame to update the coordinates

    // used to draw the background camera image.

    backgroundRenderer.updateDisplayGeometry(frame);

    if (camera.getTrackingState() == TrackingState.TRACKING

        && (depthSettings.useDepthForOcclusion()

            || depthSettings.depthColorVisualizationEnabled())) {

      try (Image depthImage = frame.acquireDepthImage16Bits()) {

        backgroundRenderer.updateCameraDepthTexture(depthImage);

      } catch (NotYetAvailableException e) {

        // This normally means that depth data is not available yet. This is normal so we will not

        // spam the logcat with this.

      }

    }

    // Handle one tap per frame.

    handleTap(frame, camera);

    // Keep the screen unlocked while tracking, but allow it to lock when tracking stops.

    trackingStateHelper.updateKeepScreenOnFlag(camera.getTrackingState());

    // Show a message based on whether tracking has failed, if planes are detected, and if the user

    // has placed any objects.

    String message = null;

    if (camera.getTrackingState() == TrackingState.PAUSED) {

      if (camera.getTrackingFailureReason() == TrackingFailureReason.NONE) {

        message = SEARCHING_PLANE_MESSAGE;

      } else {

        message = TrackingStateHelper.getTrackingFailureReasonString(camera);

      }

    } else if (hasTrackingPlane()) {

      if (wrappedAnchors.isEmpty()) {

        message = WAITING_FOR_TAP_MESSAGE;

      }

    } else {

      message = SEARCHING_PLANE_MESSAGE;

    }

    if (message == null) {

      messageSnackbarHelper.hide(this);

    } else {

      messageSnackbarHelper.showMessage(this, message);

    }

    // -- Draw background

    if (frame.getTimestamp() != 0) {

      // Suppress rendering if the camera did not produce the first frame yet. This is to avoid

      // drawing possible leftover data from previous sessions if the texture is reused.

      backgroundRenderer.drawBackground(render);

    }

    // If not tracking, don't draw 3D objects.

    if (camera.getTrackingState() == TrackingState.PAUSED) {

      return;

    }

    // -- Draw non-occluded virtual objects (planes, point cloud)

    // Get projection matrix.

    camera.getProjectionMatrix(projectionMatrix, 0, Z_NEAR, Z_FAR);

    // Get camera matrix and draw.

    camera.getViewMatrix(viewMatrix, 0);

    // Visualize tracked points.

    // Use try-with-resources to automatically release the point cloud.

    try (PointCloud pointCloud = frame.acquirePointCloud()) {

      if (pointCloud.getTimestamp() > lastPointCloudTimestamp) {

        pointCloudVertexBuffer.set(pointCloud.getPoints());

        lastPointCloudTimestamp = pointCloud.getTimestamp();

      }

      Matrix.multiplyMM(modelViewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0);

      pointCloudShader.setMat4("u_ModelViewProjection", modelViewProjectionMatrix);

      render.draw(pointCloudMesh, pointCloudShader);

    }

    // Visualize planes.

    planeRenderer.drawPlanes(

        render,

        session.getAllTrackables(Plane.class),

        camera.getDisplayOrientedPose(),

        projectionMatrix);

    // -- Draw occluded virtual objects

    // Update lighting parameters in the shader

    updateLightEstimation(frame.getLightEstimate(), viewMatrix);

    // Visualize anchors created by touch.

    render.clear(virtualSceneFramebuffer, 0f, 0f, 0f, 0f);

    for (WrappedAnchor wrappedAnchor : wrappedAnchors) {

      Anchor anchor = wrappedAnchor.getAnchor();

      Trackable trackable = wrappedAnchor.getTrackable();

      if (anchor.getTrackingState() != TrackingState.TRACKING) {

        continue;

      }

      // Get the current pose of an Anchor in world space. The Anchor pose is updated

      // during calls to session.update() as ARCore refines its estimate of the world.

      anchor.getPose().toMatrix(modelMatrix, 0);

      // Calculate model/view/projection matrices

      Matrix.multiplyMM(modelViewMatrix, 0, viewMatrix, 0, modelMatrix, 0);

      Matrix.multiplyMM(modelViewProjectionMatrix, 0, projectionMatrix, 0, modelViewMatrix, 0);

      // Update shader properties and draw

      virtualObjectShader.setMat4("u_ModelView", modelViewMatrix);

      virtualObjectShader.setMat4("u_ModelViewProjection", modelViewProjectionMatrix);

      if (trackable instanceof InstantPlacementPoint

          && ((InstantPlacementPoint) trackable).getTrackingMethod()

              == InstantPlacementPoint.TrackingMethod.SCREENSPACE_WITH_APPROXIMATE_DISTANCE) {

        virtualObjectShader.setTexture(

            "u_AlbedoTexture", virtualObjectAlbedoInstantPlacementTexture);

      } else {

        virtualObjectShader.setTexture("u_AlbedoTexture", virtualObjectAlbedoTexture);

      }

      render.draw(virtualObjectMesh, virtualObjectShader, virtualSceneFramebuffer);

    }

    // Compose the virtual scene with the background.

    backgroundRenderer.drawVirtualScene(render, virtualSceneFramebuffer, Z_NEAR, Z_FAR);
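For reference, a condensed sketch of handleTap() as used in the demo, with a hypothetical anchor cap added (the cap of 20 and the drop-oldest policy are assumptions, not part of the demo):

// Called once per frame from onDrawFrame; converts a queued tap into an anchor.
private void handleTap(Frame frame, Camera camera) {
  MotionEvent tap = tapHelper.poll();
  if (tap == null || camera.getTrackingState() != TrackingState.TRACKING) {
    return;
  }
  for (HitResult hit : frame.hitTest(tap)) {
    Trackable trackable = hit.getTrackable();
    boolean isValidHit =
        (trackable instanceof Plane
                && ((Plane) trackable).isPoseInPolygon(hit.getHitPose()))
            || (trackable instanceof Point
                && ((Point) trackable).getOrientationMode()
                    == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL);
    if (!isValidHit) {
      continue;
    }
    // Business extension (assumption): cap the number of anchors, dropping the oldest first.
    if (wrappedAnchors.size() >= 20) {
      wrappedAnchors.get(0).getAnchor().detach();
      wrappedAnchors.remove(0);
    }
    wrappedAnchors.add(new WrappedAnchor(hit.createAnchor(), trackable));
    break;
  }
}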

Tapped anchors may look irregular rather than circular, the model's orientation can deflect after a tap, and the model can be too large. These can be addressed in onDrawFrame:

 // Overwrite the upper 3x3 of the model matrix: apply a uniform scale factor and
 // zero the rotation terms, so the model keeps a fixed size and orientation.
 float[] scaleMatrixReset = modelMatrix.clone();
 scaleMatrixReset[0] = 0.001f;   // scale factor
 scaleMatrixReset[5] = 0.001f;
 scaleMatrixReset[10] = 0.001f;
 scaleMatrixReset[1] = 0f;
 scaleMatrixReset[2] = 0f;
 scaleMatrixReset[4] = 0f;
 scaleMatrixReset[6] = 0f;
 scaleMatrixReset[8] = 0f;
 scaleMatrixReset[9] = 0f;

 Matrix.multiplyMM(modelViewMatrix, 0, viewMatrix, 0, scaleMatrixReset, 0);
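If you only want to fix the model size and keep the anchor's rotation, a sketch of an alternative using android.opengl.Matrix.scaleM (the 0.001f factor is the same assumed scale as above):

// Uniformly scale the anchor's model matrix without zeroing its rotation part.
float[] scaledModelMatrix = new float[16];
Matrix.scaleM(scaledModelMatrix, 0, modelMatrix, 0, 0.001f, 0.001f, 0.001f);
Matrix.multiplyMM(modelViewMatrix, 0, viewMatrix, 0, scaledModelMatrix, 0);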

About the drawing (doodle) feature:

Reference: GitHub - googlecreativelab/ar-drawing-java: A simple AR drawing experiment built in Java using ARCore. This is a standalone drawing app.

To merge drawing and anchors into a single project, besides the necessary UI and mode-switching logic, pay attention to the following after combining the two code bases:

During stroke rendering, LineShaderRenderer.updateStrokes() is a good place for business logic, such as setting the stroke color.

LineShaderRenderer.draw() is where to add handling for the case where anchors and strokes exist at the same time, which needs special treatment (see the sketch below).
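One simple way to handle that coexistence is an explicit mode switch, so a tap either places an anchor or extends a stroke, never both. A sketch, where every name except handleTap() is an assumption:

// Hypothetical mode switch when anchors and strokes coexist (names are assumptions).
enum ArMode { ANCHOR, DRAW }

private ArMode currentMode = ArMode.ANCHOR;

// Inside onDrawFrame, route the frame to one feature at a time.
private void routeFrame(Frame frame, Camera camera) {
  if (currentMode == ArMode.ANCHOR) {
    handleTap(frame, camera);               // anchor placement (hello_ar_java path)
  } else {
    addPointToCurrentStroke(frame, camera); // hypothetical drawing path that ends in
                                            // LineShaderRenderer.updateStrokes() + draw()
  }
}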

There are also some more detailed write-ups, for example: https://zhuanlan.zhihu.com/p/136478509

How do you capture the ARCore video stream?

 // Read back the rendered frame on the GL thread (e.g. at the end of onDrawFrame);
 // gl here is a GL10 instance (e.g. from the GLSurfaceView.Renderer callbacks).
 int width = surfaceView.getWidth();
 int height = surfaceView.getHeight();
 int size = width * height;
 final int[] b = new int[size];
 IntBuffer ib = IntBuffer.wrap(b);
 ib.position(0);
 gl.glReadPixels(0, 0, width, height, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);

The resulting ib is an IntBuffer; convert it to whatever format you need.

Below are a few conversion helpers.

RGBA to ARGB (with horizontal mirroring):

int[] data = ib.array(); // RGBA -> ARGB

        for (int row = 0; row < height; row++) {

            for (int col = 0; col < width / 2; col++) {

                int currentIndex = row * width + col;

                int opIndex = (row + 1) * width - col - 1;

// horizontal mirror

                int tmp = data[currentIndex];

                data[currentIndex] = data[opIndex];

                data[opIndex] = tmp;

                int aa = data[currentIndex] >> 24;

                int rr = (data[currentIndex] >> 16) & 0xFF;

                int gg = (data[currentIndex] >> 8) & 0xFF;

                int bb = data[currentIndex] & 0xFF;

                data[currentIndex] = (aa << 24) | (bb << 16) | (gg << 8) | rr;

                aa = data[opIndex] >> 24;

                bb = (data[opIndex] >> 16) & 0xFF;

                gg = (data[opIndex] >> 8) & 0xFF;

                rr = data[opIndex] & 0xFF;

                data[opIndex] = (aa << 24) | (rr << 16) | (gg << 8) | bb;

            }

        }
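If a Bitmap is needed (for example for a snapshot), a minimal sketch using the mirrored ARGB array from the loop above:

// Wrap the ARGB pixels in a Bitmap. Note that glReadPixels returns rows bottom-up,
// so a vertical flip may still be needed depending on how the frame is consumed.
Bitmap bitmap = Bitmap.createBitmap(data, width, height, Bitmap.Config.ARGB_8888);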

 /**
     * Convert ARGB pixel data to NV21.
     *
     * @param argb   ARGB pixel data
     * @param width  image width
     * @param height image height
     * @return NV21 byte array
     */

    public static byte[] argbToNv21(int[] argb, int width, int height) {

        int frameSize = width * height;

        int yIndex = 0;

        int uvIndex = frameSize;

        int index = 0;

        byte[] nv21 = new byte[width * height * 3 / 2];

        for (int j = 0; j < height; ++j) {

            for (int i = 0; i < width; ++i) {

                int R = (argb[index] & 0xFF0000) >> 16;

                int G = (argb[index] & 0x00FF00) >> 8;

                int B = argb[index] & 0x0000FF;

                int Y = (66 * R + 129 * G + 25 * B + 128 >> 8) + 16;

                int U = (-38 * R - 74 * G + 112 * B + 128 >> 8) + 128;

                int V = (112 * R - 94 * G - 18 * B + 128 >> 8) + 128;

                nv21[yIndex++] = (byte) (Y < 0 ? 0 : (Y > 255 ? 255 : Y));

                if (j % 2 == 0 && index % 2 == 0 && uvIndex < nv21.length - 2) {

                    nv21[uvIndex++] = (byte) (V < 0 ? 0 : (V > 255 ? 255 : V));

                    nv21[uvIndex++] = (byte) (U < 0 ? 0 : (U > 255 ? 255 : U));

                }

                ++index;

            }

        }

        return nv21;

    }
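A hypothetical end-to-end use of the helpers above, assuming the NV21 frames are handed to your own encoder or streaming SDK:

int[] argb = ib.array();                        // after the RGBA -> ARGB + mirror loop
byte[] nv21 = argbToNv21(argb, width, height);  // YUV420 semi-planar (NV21)
// videoEncoder.encodeFrame(nv21, width, height);  // your own encoder (assumption)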
