#include "Aria.h"
#include <iostream>
#include <sstream>
#include <iomanip>
#include <fstream>
#include <math.h>
#include <ctime>
#include <string>
#include "camera.h"
#include <opencv2/opencv.hpp>
#include "ARToolKitPlus/TrackerSingleMarkerImpl.h"
#include <cstdlib>
#include <vector>
#include <kalman/kvector.hpp>
#include <conio.h>
#include "my_efk.h"
//typedef KVector<double, 1, false> myVector;
// Pi. NOTE: the original value 3.1416926 had a typo in the fourth
// decimal place (pi is 3.1415926...); since PI feeds the EKF initial
// heading (-PI/2) and the angular controls (+/-PI/12), the typo
// biased every angle computation. Fixed to full double precision.
#define PI 3.14159265358979
#define Time 2
using namespace std;
using namespace Kalman;
using namespace cv;
// Logger implementation for ARToolKitPlus: forwards the tracker's
// log messages to stdout.
class MyLogger : public ARToolKitPlus::Logger
{
// Called by the tracker with an already-formatted message.
// Print via "%s": passing nStr directly as the format string is a
// format-string bug (undefined behavior if the message contains '%').
void artLog(const char* nStr)
{
printf("%s", nStr);
}
};
// Entry point: teleoperated robot localization demo.
// Flow: open the camera, preview until Enter, configure an
// ARToolKitPlus single-marker tracker, initialize an EKF and the
// ARIA robot connection, then loop: (grab image -> detect marker ->
// EKF predict/update -> drive robot from one arrow-key command),
// and finally dump all logged data to timestamped text files.
int main(int argc, char **argv)
{
/*********************************************/
// Get the current system time and format it as "YYYYMMDDHHMMSS";
// used below as a filename prefix for the four output data files.
time_t now = time(NULL);
struct tm timeinfo = *localtime(&now);
char buf[40];
strftime(buf,sizeof(buf),"%Y%m%d%H%M%S",&timeinfo);
string a=buf;
cout<<a<<endl;
/*********************************************/
// Camera setup -- NOTE(review): DHcamera comes from camera.h; its
// exact init semantics are not visible here.
DHcamera camera;
camera.cameraini();
camera.improveini();
const bool useBCH = true;
// Camera image geometry: 656x492, 1 byte per pixel (luminance).
// NOTE(review): numPixels and numBytesRead are never used below.
const int width = 656, height = 492, bpp = 1,
numPixels = width*height*bpp;
size_t numBytesRead;
Mat img;
// Live preview: show camera frames until Enter (key code 13) is
// pressed in the OpenCV window.
namedWindow("picture");
while (waitKey(30) != 13)
{
camera.getimage(img);
imshow("picture",img);
}
destroyAllWindows();
// The tracker consumes single-channel data; convert the last frame.
cvtColor(img,img,CV_RGB2GRAY);
// Single-marker tracker for 8-bit luminance input at camera size.
ARToolKitPlus::TrackerSingleMarker *tracker = new ARToolKitPlus::TrackerSingleMarkerImpl<6,6,6,
ARToolKitPlus::PIXEL_FORMAT_LUM, 1, 8>(width,height);
const char* description = tracker->getDescription();
printf("ARToolKitPlus compile-time information:\n%s\n\n", description);
if(!tracker->init("D:\\data\\camera\\camera.cal", 1.0f, 1000.0f)) // load MATLAB file
{
printf("ERROR: init() failed\n");
//delete cameraBuffer;
delete tracker;
return -1;
}
// Replace the tracker's default 320*240 camera size with the real
// image size (the original comment said 640*480; the actual size
// used here is 656*492).
tracker->changeCameraSize(width,height);
// define size of the marker
tracker->setPatternWidth(144);
// the marker in the BCH test image has a thin border...
tracker->setBorderWidth(useBCH ? 0.125f : 0.250f);
// set a threshold. alternatively we could also activate automatic thresholding
//tracker->setThreshold(100);
tracker->activateAutoThreshold(true);
// let's use lookup-table undistortion for high-speed
// note: LUT only works with images up to 1024x1024
tracker->setUndistortionMode(ARToolKitPlus::UNDIST_STD);
// RPP is more robust than ARToolKit's standard pose estimator
tracker->setPoseEstimator(ARToolKitPlus::POSE_ESTIMATOR_RPP);
// switch to simple ID based markers
// use the tool in tools/IdPatGen to generate markers
tracker->setMarkerMode(useBCH ? ARToolKitPlus::MARKER_ID_BCH : ARToolKitPlus::MARKER_ID_SIMPLE);
// do the OpenGL camera setup
//glMatrixMode(GL_PROJECTION)
//glLoadMatrixf(tracker->getProjectionMatrix());
// here we go, just one call to find the camera pose
//cout<<"单击回车进行测算"<<endl;
/*********************************************/
// Extended Kalman filter initialization.
// State x = (x position, y position, heading theta); the Kalman
// library's Vector/Matrix are 1-based.
my_EKF filter;
static const double _p0[] = {1,0,0,
0,1,0,
0,0,1};
const unsigned nn = 3;
my_EKF::Vector x(nn);
// Control u = (linear velocity, angular velocity) -- presumably
// mm/s and rad/s to match setVel/setRotVel below; TODO confirm
// against my_efk.h.
my_EKF::Vector u(2);
u(1) = 0;
u(2) = 0;
// Measurement z = (distance to marker, bearing angle).
my_EKF::Vector z(2);
// Initial covariance P0 = identity.
Matrix p0(3,3,_p0);
// Initial pose estimate. NOTE(review): the filter starts at y=500
// while odometry is reset to y=505 below -- confirm the 5 mm
// offset is intentional.
x(1) = 0;
x(2) = 500;
x(3) = -PI/2.0;
filter.init(x,p0);
// ARIA robot library initialization and connection.
Aria::init();
//waitKey();
ArArgumentParser argParser(&argc, argv);
argParser.loadDefaultArguments();
ArRobot robot;
ArRobotConnector con(&argParser, &robot);
if(!Aria::parseArgs())
{
Aria::logOptions();
Aria::exit(1);
waitKey();
return 1;
}
if(!con.connectRobot())
{
ArLog::log(ArLog::Normal, "directMotionExample: Could not connect to the robot. Exiting.");
if(argParser.checkHelpAndWarnUnparsed())
{
Aria::logOptions();
}
Aria::exit(1);
waitKey();
return 1;
}
ArLog::log(ArLog::Normal, "directMotionExample: Connected.");
if(!Aria::parseArgs() || !argParser.checkHelpAndWarnUnparsed())
{
Aria::logOptions();
waitKey();
Aria::exit(1);
}
//waitKey();
// Run the robot processing cycle in its own thread. Note that after starting this
// thread, we must lock and unlock the ArRobot object before and after
// accessing it.
ArPose currentpose;
// Reset odometry to the known start pose (x=0, y=505, th=-90 deg).
ArPose inipose(0,505,-90);
robot.moveTo(inipose);
robot.comInt(ArCommands::ENABLE,1);
robot.comInt(ArCommands::SOUNDTOG,1);
robot.runAsync(false);
//robot.lock();
//currentpose = robot.getPose();
//robot.moveTo(inipose,currentpose);
//robot.moveTo(inipose);
//robot.unlock();
//ArUtil::sleep(2000);
//double err[100];
//float dist[20];
// Per-step logs, written to the output files after the main loop:
vector<ArPose> robotposelist; // odometry pose at each step
vector<float> dist; // measured distance to the marker
vector<int> isekf; // 1 = measurement update ran, -1 = predict only
//float theta_[20];
vector<float> theta_; // measured bearing to the marker
//float xout[20],yout[20],thetaout[20];
vector<float> xout,yout,thetaout; // EKF state after each step
char c;
stringstream ss;
string picfile,picnumber,picformat,picname;
//picfile = "D:\\data\\picture\\";
//picformat = ".jpg";
//namedWindow("picture");
// Main teleoperation + localization loop. Each iteration:
//   1. record odometry, grab an image, detect the marker;
//   2. run the EKF (predict; update when a confident detection
//      is available);
//   3. read one console key (arrow keys) and drive the robot for
//      2 s with the corresponding velocities.
// The loop exits when Enter is pressed at the getch() prompt.
for (int i = 0;; ++i)
{
//ss<<i;
//ss>>picnumber;
//picname = picfile+picnumber+picformat;
currentpose = robot.getPose();
robotposelist.push_back(currentpose);
namedWindow("picture");
camera.getimage(img);
//imwrite(picname,img);
imshow("picture",img);
// Blocks until any key is pressed in the image window.
waitKey();
destroyAllWindows();
cvtColor(img,img,CV_RGB2GRAY);
//tracker->calc(img.data);
tracker->setPatternWidth(144);
int markerId = tracker->calc(img.data);
float conf = tracker->getConfidence();
// Copy the tracker's flat 16-double model-view matrix into a 4x4.
// NOTE(review): this inner loop variable i shadows the outer step
// counter i.
Matx44d T;
for (int i = 0;i < 4;i ++)
{
for (int j = 0;j < 4;j ++)
{
T(i,j) = tracker->getModelViewMatrix()[i*4 + j];
}
}
cout<<"T="<<endl;
cout<<T<<endl;
// Elements 12..14 of the flat array (here row 3 of T) are taken as
// the translation, i.e. the marker position in the camera frame --
// consistent with OpenGL's column-major model-view layout.
Matx13d Pw(T(3,0),T(3,1),T(3,2));
//int markerId = tracker->calc(img.data);
cout<<"识别到的maker是:"<<markerId<<endl;
//float conf = tracker->getConfidence();
cout<<"置信概率是:"<<(int)(conf*100.0f)<<"%"<<endl;
// Treat detections below 90% confidence as "no marker seen".
if ((int)(conf*100.0f) < 90)
{
markerId = -1;
}
if (markerId == -1)
{
isekf.push_back(-1);
}
else
{
isekf.push_back(1);
}
cout<<"标志点在相机坐标系中的坐标:"<<endl;
cout<<Pw<<endl;
// Range and bearing of the marker in the camera X-Z plane.
double d = sqrt(Pw(0,0)*Pw(0,0) + Pw(0,2)*Pw(0,2));
double theta = atan2(Pw(0,2),Pw(0,0)) - PI/2.0;
cout<<"标志点距离相机的距离是:"<<d<<endl;
cout<<"计算出来是theta角是:"<<theta<<endl;
//dist[i] = d;
dist.push_back(d);
theta_.push_back(theta);
// if (i == 0)
// {
// theta_.push_back(theta - PI/2.0);
// //theta_[i] = theta - PI/2.0;
// }
// else
// {
// //theta_[i] = theta - theta_[i-1] - PI/2.0;
// theta_.push_back(theta - PI/2.0);
// //theta_[i] = theta - PI/2.0;
// }
my_EKF::Vector x_now;
// Run 30 EKF cycles for the 2 s motion performed with the previous
// control u -- presumably 30 small internal time steps; TODO
// confirm the filter's step length against my_efk.h.
for (int j = 0;j < 30;j++)
{
filter.timeUpdateStep(u);
// if (i == 0) //初始第一步没有加控制的时候不做ekf
// {
// x_now = filter.getX();
// }
if (i >= 0)
{
filter.set_obj(markerId);
if (markerId == -1 ) // no marker detected: prediction only
{
cout<<"只做预测"<<endl;
//filter.timeUpdateStep(u);
x_now = filter.getX();
//x_now = filter.predict(u);
}
else // marker detected: prediction plus measurement update
{
// Feed the range/bearing measurement of this step.
cout<<"做卡尔曼滤波"<<endl;
z(1) = d;
z(2) = theta_[i];
//filter.step(u,z);
filter.measureUpdateStep(z);
x_now = filter.getX();
}
// x(1) = x_now(1);
// x(2) = x_now(2);
// x(3) = x_now(3);
}
// NOTE(review): u is zeroed INSIDE the j-loop, so only the first
// of the 30 time updates sees the non-zero control -- confirm
// this is intended.
u(1) = 0;
u(2) = 0;
}
// filter.timeUpdateStep(u);
// // if (i == 0) //初始第一步没有加控制的时候不做ekf
// // {
// // x_now = filter.getX();
// // }
// if (i >= 0)
// {
// filter.set_obj(markerId);
// if (markerId == -1) //没有检测到障碍物只做预测
// {
// cout<<"只做预测"<<endl;
// //filter.timeUpdateStep(u);
// x_now = filter.getX();
// //x_now = filter.predict(u);
// }
// else //检测到障碍物,做预测和更新
// {
// //根据序号得到测量值
// cout<<"做卡尔曼滤波"<<endl;
// z(1) = d;
// z(2) = theta;
// //filter.step(u,z);
// filter.measureUpdateStep(z);
// x_now = filter.getX();
// }
// // x(1) = x_now(1);
// // x(2) = x_now(2);
// // x(3) = x_now(3);
// }
xout.push_back(x_now(1));
yout.push_back(x_now(2));
thetaout.push_back(x_now(3));
//xout[i] = x_now(1);
//yout[i] = x_now(2);
//thetaout[i] = x_now(3);
cout<<"X = "<<x_now(1)<<" "<<"Y = "<<x_now(2)<<" "<<"Theta = "<<x_now(3)<<endl;
//getchar();
// Read one key from the console (conio getch). Extended keys such
// as the arrows arrive as a 0xE0 (-32) prefix byte followed by the
// scan code, hence the second getch().
c = getch();
if (c == -32)
{
c = getch();
}
// Enter (13): leave the main loop and write the log files.
if (c == 13)
{
break;
}
// Up arrow (72): drive forward at 100 mm/s for 2 s.
if (c == 72)
{
robot.lock();
robot.setVel(100);
robot.setRotVel(0);
robot.unlock();
u(1) = 100; // record control input for the EKF
u(2) = 0;
ArUtil::sleep(2000);
}
// Down arrow (80): drive backward at 100 mm/s for 2 s.
if (c == 80)
{
robot.lock();
robot.setVel(-100);
robot.setRotVel(0);
robot.unlock();
u(1) = -100; // record control input for the EKF
u(2) = 0;
ArUtil::sleep(2000);
}
// Right arrow (77): arc forward while turning right at -15 deg/s.
if (c == 77)
{
robot.lock();
robot.setVel(100);
robot.setRotVel(-15);
robot.unlock();
u(1) = 100; // record control input for the EKF
u(2) = -PI/12.0; // -15 deg/s expressed in rad/s
ArUtil::sleep(2000);
}
// Left arrow (75): arc forward while turning left at +15 deg/s.
if (c == 75)
{
robot.lock();
robot.setVel(100);
robot.setRotVel(15);
robot.unlock();
u(1) = 100; // record control input for the EKF
u(2) = PI/12; // +15 deg/s expressed in rad/s
cout<<"left"<<endl;
ArUtil::sleep(2000);
}
// Stop after the timed motion so the next image is taken at rest.
robot.lock();
robot.stop();
robot.unlock();
ArUtil::sleep(100);
picname.clear();
picnumber.clear();
//ss.clear();
}
// Dump the logged data, each file prefixed with the start-time
// stamp: measurements, EKF estimates, odometry poses, update flags.
ofstream file(a+"dist_theta.txt");
for (int i = 0;i < dist.size();i ++)
{
file<<dist[i]<<" "<<theta_[i]<<endl;
}
file.close();
ofstream file1(a+"out.txt");
for (int i = 0;i < xout.size();i ++)
{
file1<<xout[i]<<" "<<yout[i]<<" "<<thetaout[i]<<endl;
}
file1.close();
ofstream file2(a+"ARpost.txt");
for (int i = 0;i < robotposelist.size();i ++)
{
file2<<robotposelist[i].getX()<<" "<<robotposelist[i].getY()<<" "<<robotposelist[i].getTh()<<endl;
}
file2.close();
ofstream file3(a+"isekf.txt");
for (int i = 0;i < isekf.size();i ++)
{
file3<<isekf[i]<<endl;
}
file3.close();
// printf("directMotionExample: Telling the robot to go 300 mm on left wheel and 100 mm on right wheel for 5 seconds\n");
// robot.lock();
// robot.setVel2(300, 100);
// robot.unlock();
// ArTime start;
// start.setToNow();
// while (1)
// {
// robot.lock();
// currentpose = robot.getPose();
// if (start.mSecSince() > 5000)
// {
// robot.unlock();
// break;
// }
// robot.unlock();
// cout<<"x = "<<fixed<<setprecision(3)<<setw(8)<<currentpose.getX()<<" ";
// cout<<"y = "<<fixed<<setprecision(3)<<setw(8)<<currentpose.getY()<<" ";
// cout<<"th = "<<fixed<<setprecision(3)<<setw(8)<<currentpose.getTh()<<"\n";
// ArUtil::sleep(50);
// }
// cout<<endl;
// NOTE(review): if Aria::exit terminates the process, the cleanup
// below never runs -- confirm, and consider releasing the tracker
// and camera before shutting ARIA down.
Aria::exit(0);
delete tracker;
camera.closecamera();
//waitKey();
//system("pause");
cout<<a<<endl;
getchar();
return 0;
}
// 机器人定位程序 (robot localization program)
// 最新推荐文章于 2021-12-31 12:19:02 发布
// NOTE(review): the two lines above are stray blog-page footer text,
// not code; commented out so the translation unit compiles.