#include <chrono>
#include <string>
#include <jni.h>
#include <libusb/libusb.h>
#include "opencv2/core.hpp"
#include "depthai/depthai.hpp"
#include "utils.h"
using namespace std;
std::shared_ptr<dai::Device> device;
shared_ptr<dai::DataOutputQueue> qRgb, qDepth, qDet;
cv::Mat detection_img;
// Neural network
std::vector<uint8_t> model_buffer;
static std::atomic<bool> syncNN{true};
std::vector<dai::ImgDetection> detections;
// Closer-in minimum depth, disparity range is doubled (from 95 to 190):
static std::atomic<bool> extended_disparity{true};
auto maxDisparity = extended_disparity ? 190.0f : 95.0f;
// Better accuracy for longer distance, fractional disparity 32-levels:
static std::atomic<bool> subpixel{false};
// Better handling for occlusions:
static std::atomic<bool> lr_check{false};
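// NOTE (illustrative sketch, not from the original article): maxDisparity is typically
// used to normalize the raw disparity frame to 8-bit for display, e.g. with OpenCV:
//     frame.convertTo(frame, CV_8UC1, 255.0 / maxDisparity);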
extern "C"
JNIEXPORT void JNICALL
Java_com_example_depthai_1android_1jni_1example_MainActivity_startDevice(JNIEnv *env, jobject thiz, jstring model_path,
                                                                         jint rgbWidth, jint rgbHeight) {
    // libusb: hand the JNI environment to libusb so it can open devices on Android
    auto r = libusb_set_option(nullptr, LIBUSB_OPTION_ANDROID_JNIENV, env);
    log("libusb_set_option ANDROID_JNIENV: %s", libusb_strerror(r));

    // Connect to the device
    device = make_shared<dai::Device>(dai::OpenVINO::VERSION_2021_4, dai::UsbSpeed::HIGH);
    // An OAK-D exposes three cameras (RGB + left/right mono), so stereo depth is available
    bool oakD = device->getConnectedCameras().size() == 3;

    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
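    // ------------------------------------------------------------------
    // Sketch of a typical continuation (assumption: the remainder of the
    // original article is not reproduced here). Node names, stream names
    // and queue sizes below are illustrative, not the author's exact code.
    // ------------------------------------------------------------------

    // RGB camera feeding a MobileNet-style detection network
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto detectionNetwork = pipeline.create<dai::node::MobileNetDetectionNetwork>();
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
    auto xoutDet = pipeline.create<dai::node::XLinkOut>();
    xoutRgb->setStreamName("rgb");
    xoutDet->setStreamName("detections");

    camRgb->setPreviewSize(rgbWidth, rgbHeight);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    camRgb->setInterleaved(false);
    camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);

    // model_buffer is assumed to have been filled from model_path (e.g. via the
    // Android asset manager) before this point.
    detectionNetwork->setConfidenceThreshold(0.5f);
    detectionNetwork->setBlob(dai::OpenVINO::Blob(model_buffer));

    camRgb->preview.link(detectionNetwork->input);
    if(syncNN) detectionNetwork->passthrough.link(xoutRgb->input);
    else camRgb->preview.link(xoutRgb->input);
    detectionNetwork->out.link(xoutDet->input);

    // Stereo depth is only set up when the left/right mono pair is present (OAK-D)
    if(oakD) {
        auto monoLeft = pipeline.create<dai::node::MonoCamera>();
        auto monoRight = pipeline.create<dai::node::MonoCamera>();
        auto stereo = pipeline.create<dai::node::StereoDepth>();
        auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
        xoutDepth->setStreamName("depth");

        monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
        monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
        monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
        monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

        // Apply the disparity/depth options declared at the top of the file
        stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);
        stereo->setLeftRightCheck(lr_check);
        stereo->setExtendedDisparity(extended_disparity);
        stereo->setSubpixel(subpixel);

        monoLeft->out.link(stereo->left);
        monoRight->out.link(stereo->right);
        stereo->disparity.link(xoutDepth->input);
    }

    // Start the pipeline and grab the output queues consumed by the render loop
    device->startPipeline(pipeline);
    qRgb = device->getOutputQueue("rgb", 4, false);
    qDet = device->getOutputQueue("detections", 4, false);
    if(oakD) qDepth = device->getOutputQueue("depth", 4, false);
}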