最近公司接到一个关于Kinect的项目,就要用到Java获取Kinect的视频流,苦逼的我从来没搞过这东西,只能死坑研究,还好有bytedeco团队的github项目(https://github.com/bytedeco/javacpp-presets/tree/master/libfreenect2),才让我脱离困境。结果我以为我做完了,其实是新的厄运的开始,这个库只支持x86的CPU架构,并不支持ARM64的CPU架构,因此我写的所有代码全部作废,也就可以给大家看看了,毕竟功能已经实现了,只是无法在客户的ARM64位平台上运行罢了,最终使用是Python解决的,当然Python这边的代码就不能给大家看啦(害我又多会了一门语言,(*^__^*) 嘻嘻……)
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.libfreenect2.CpuPacketPipeline;
import org.bytedeco.libfreenect2.Frame;
import org.bytedeco.libfreenect2.FrameMap;
import org.bytedeco.libfreenect2.Freenect2;
import org.bytedeco.libfreenect2.Freenect2Device;
import org.bytedeco.libfreenect2.PacketPipeline;
import org.bytedeco.libfreenect2.SyncMultiFrameListener;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.Raster;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import javax.imageio.ImageIO;
/**
* Created by root on 2020/9/10.
* Usage:
*/
public class KinectManager {
public static int frameCount = 0;
/**
* Run Kinect and get rgb byte array, you can output this byteArray as image
* Warning: one method must not be too long, or .so file will throw exception!!
*
* @param count frame count to run
* @param onGetDataCallback on get rgb byte array
*/
public void run(int count, OnGetDataCallback onGetDataCallback) {
Freenect2 freenect2Context;
try {
Loader.load(org.bytedeco.libfreenect2.global.freenect2.class);
// Context is shared accross cameras.
freenect2Context = new Freenect2();
} catch (Exception e) {
System.out.println("Exception in the TryLoad !" + e);
e.printStackTrace();
return;
}
Freenect2Device device = null;
PacketPipeline pipeline = null;
String serial = "";
// Only CPU pipeline tested.
pipeline = new CpuPacketPipeline();
if (serial == "") {
serial = freenect2Context.getDefaultDeviceSerialNumber().getString();
System.out.println("Serial:" + serial);
}
device = freenect2Context.openDevice(serial, pipeline);
int types = 0;
types |= Frame.Color;
types |= Frame.Ir | Frame.Depth;
SyncMultiFrameListener listener = new SyncMultiFrameListener(types);
device.setColorFrameListener(listener);
device.setIrAndDepthFrameListener(listener);
device.start();
System.out.println("Serial: " + device.getSerialNumber().getString());
System.out.println("Firmware: " + device.getFirmwareVersion().getString());
if (count == -1) {
count = Integer.MAX_VALUE;
}
FrameMap frames = new FrameMap();
for (int i = 0; i < count; i++) {
System.out.println("getting frame " + frameCount);
if (!listener.waitForNewFrame(frames, 10 * 1000)) { // 10 sconds
System.out.println("timeout!");
return;
}
Frame rgb = frames.get(Frame.Color);
int width = (int) rgb.width();
int height = (int) rgb.height();
int totalByteOneFrame = (int) (width * height * rgb.bytes_per_pixel());
System.out.println("RGB image, w:" + width + " " + height + " bytes_per_pixel is " + rgb.bytes_per_pixel() + " totalByteOneFrame is " + totalByteOneFrame);
BytePointer data = rgb.data();
// INFO: Dechert: 2020/9/10 Get all bytes into a byteBuffer in one frame
ByteBuffer byteBuffer = data.position(0).limit(totalByteOneFrame).asByteBuffer();
byte[] bgrxArray = new byte[totalByteOneFrame];
// INFO: Dechert: 2020/9/10 Fetch all bytes into byte array
byteBuffer.get(bgrxArray);
byte[] rgbArray = getRgbArray(width, height, bgrxArray);
onGetDataCallback.onGet(rgbArray);
frameCount++;
listener.release(frames);
}
device.stop();
device.close();
}
public byte[] getRgbArray(int width, int height, byte[] bgrxArray) {
int rgbLength = height * width * 3;
byte[] rgbArray = new byte[rgbLength];
int pixelIndex = 0;
for (int bgrxIndex = 0; bgrxIndex < bgrxArray.length; bgrxIndex++) {
int bgrx = bgrxIndex % 4;
switch (bgrx) {
case 0: {
byte blue = bgrxArray[bgrxIndex];
rgbArray[pixelIndex * 3 + 2] = blue;
break;
}
case 1: {
byte green = bgrxArray[bgrxIndex];
rgbArray[pixelIndex * 3 + 1] = green;
break;
}
case 2: {
byte red = bgrxArray[bgrxIndex];
rgbArray[pixelIndex * 3] = red;
break;
}
case 3: {
pixelIndex++;
break;
}
}
}
return rgbArray;
}
public void outputImage(byte[] bgrArray) {
// INFO: Dechert: 2020/9/10 Set bgrArray into image
BufferedImage image = new BufferedImage(1920, 1080, BufferedImage.TYPE_3BYTE_BGR);
Raster raster = image.getRaster();
DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
ByteBuffer.wrap(dataBuffer.getData()).put(bgrArray);
// INFO: Dechert: 2020/9/10 Output Kinect image
try {
long startWrite = System.currentTimeMillis();
ImageIO.write(image, "jpeg", new File("/home/kinect/Temp/output2/image_" + System.currentTimeMillis() + ".jpeg"));
long endWrite = System.currentTimeMillis();
System.out.println("write time is " + (endWrite - startWrite));
} catch (IOException e) {
e.printStackTrace();
}
}
}
public static void main(String[] args) {
KinectManager kinectManager = new KinectManager();
kinectManager.run(10, new OnGetDataCallback() {
@Override
public void onGet(byte[] rgbArray) {
kinectManager.outputImage(rgbArray);
}
});
}
要运行这段代码的前提是环境都已经准备好了,Kinect V2已经连接到电脑了。
注意:这个库有问题,一个方法中运行的时间不能太长,否则会报错。