/*! \example tutorial-ibvs-4pts-wireframe-robot-viper.cpp */
#include <visp3/core/vpConfig.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/robot/vpSimulatorViper850.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/vs/vpServo.h>
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
void display_trajectory(const vpImage<unsigned char> &I, std::vector<vpPoint> &point, const vpHomogeneousMatrix &cMo,
const vpCameraParameters &cam)
{
unsigned int thickness = 3;
VP_ATTRIBUTE_NO_DESTROY static std::vector<vpImagePoint> traj[4];
vpImagePoint cog;
for (unsigned int i = 0; i < 4; i++) {
// Project the point at the given camera position
point[i].project(cMo);
vpMeterPixelConversion::convertPoint(cam, point[i].get_x(), point[i].get_y(), cog);
traj[i].push_back(cog);
}
for (unsigned int i = 0; i < 4; i++) {
for (unsigned int j = 1; j < traj[i].size(); j++) {
vpDisplay::displayLine(I, traj[i][j - 1], traj[i][j], vpColor::green, thickness);
}
}
}
// Image-based visual servoing (IBVS) on four points, simulated on a wireframe
// Viper 850 robot. The camera is servoed from an initial pose cMo to a desired
// pose cdMo using an eye-in-hand control law expressed in the camera frame.
int main()
{
#if defined(VISP_HAVE_THREADS)
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
std::shared_ptr<vpDisplay> display;
#else
vpDisplay *display = nullptr;
#endif
try {
// Desired camera pose w.r.t. the object: centered, 0.75 m in front of it.
vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
// Initial camera pose w.r.t. the object (translation in meters, rotation in radians).
vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));
/*
Top view of the world frame, the camera frame and the object frame
world, also robot base frame : --> w_y
|
\|/
w_x
object :
o_y
/|\
|
o_x <--
camera :
c_y
/|\
|
c_x <--
*/
// Pose of the object in the world (robot base) frame, matching the sketch above.
vpHomogeneousMatrix wMo(vpTranslationVector(0.40, 0, -0.15), vpRotationMatrix(vpRxyzVector(-M_PI, 0, M_PI / 2.)));
// Four coplanar 3D points forming a 20 cm square in the object frame.
std::vector<vpPoint> point;
point.push_back(vpPoint(-0.1, -0.1, 0));
point.push_back(vpPoint(0.1, -0.1, 0));
point.push_back(vpPoint(0.1, 0.1, 0));
point.push_back(vpPoint(-0.1, 0.1, 0));
// Eye-in-hand servo task: velocity computed in the camera frame, interaction
// matrix evaluated at the current feature positions, gain lambda = 0.5.
vpServo task;
task.setServo(vpServo::EYEINHAND_CAMERA);
task.setInteractionMatrixType(vpServo::CURRENT);
task.setLambda(0.5);
// Build current (p) and desired (pd) point features from the two poses.
vpFeaturePoint p[4], pd[4];
for (unsigned int i = 0; i < 4; i++) {
point[i].track(cdMo);
vpFeatureBuilder::create(pd[i], point[i]);
point[i].track(cMo);
vpFeatureBuilder::create(p[i], point[i]);
task.addFeature(p[i], pd[i]);
}
// Viper 850 simulator with its own display thread (argument true).
vpSimulatorViper850 robot(true);
robot.setVerbose(true);
// Enlarge the default joint limits
vpColVector qmin = robot.getJointMin();
vpColVector qmax = robot.getJointMax();
qmin[0] = -vpMath::rad(180);
qmax[0] = vpMath::rad(180);
qmax[1] = vpMath::rad(0);
qmax[2] = vpMath::rad(270);
qmin[4] = -vpMath::rad(180);
qmax[4] = vpMath::rad(180);
robot.setJointLimit(qmin, qmax);
std::cout << "Robot joint limits: " << std::endl;
for (unsigned int i = 0; i < qmin.size(); i++)
std::cout << "Joint " << i << ": min " << vpMath::deg(qmin[i]) << " max " << vpMath::deg(qmax[i]) << " (deg)"
<< std::endl;
// Select the camera tool mounted on the end-effector and its projection model.
robot.init(vpViper850::TOOL_PTGREY_FLEA2_CAMERA, vpCameraParameters::perspectiveProjWithoutDistortion);
robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
// Wireframe scene: a plate observed by the camera.
robot.initScene(vpWireFrameSimulator::PLATE, vpWireFrameSimulator::D_STANDARD);
robot.set_fMo(wMo);
// Move the simulated robot so the camera starts at cMo relative to the object.
bool ret = robot.initialiseCameraRelativeToObject(cMo);
if (ret == false)
return EXIT_FAILURE; // Not able to set the position
robot.setDesiredCameraPosition(cdMo);
// We modify the default external camera position
robot.setExternalCameraPosition(
vpHomogeneousMatrix(vpTranslationVector(-0.4, 0.4, 2), vpRotationMatrix(vpRxyzVector(M_PI / 2, 0, 0))));
// Internal (eye-in-hand) camera view, initialized to white (255).
vpImage<unsigned char> Iint(480, 640, 255);
#if defined(VISP_HAVE_DISPLAY)
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
display = vpDisplayFactory::createDisplay(Iint, 700, 0, "Internal view");
#else
display = vpDisplayFactory::allocateDisplay(Iint, 700, 0, "Internal view");
#endif
#else
std::cout << "No image viewer is available..." << std::endl;
#endif
// Intrinsics: px = py = 840, principal point at the image center.
vpCameraParameters cam(840, 840, Iint.getWidth() / 2, Iint.getHeight() / 2);
// Modify the camera parameters to match those used in the other
// simulations
robot.setCameraParameters(cam);
bool start = true;
// Servo loop: bounded to 275 iterations here (originally an infinite loop).
// for ( ; ; )
for (int iter = 0; iter < 275; iter++) {
// Update the current features from the camera pose reached by the robot.
cMo = robot.get_cMo();
for (unsigned int i = 0; i < 4; i++) {
point[i].track(cMo);
vpFeatureBuilder::create(p[i], point[i]);
}
vpDisplay::display(Iint);
robot.getInternalView(Iint);
if (!start) {
display_trajectory(Iint, point, cMo, cam);
vpDisplay::displayText(Iint, 40, 120, "Click to stop the servo...", vpColor::red);
}
vpDisplay::flush(Iint);
// Compute v = -lambda * L^+ * e and send it as a camera-frame velocity.
vpColVector v = task.computeControlLaw();
robot.setVelocity(vpRobot::CAMERA_FRAME, v);
// A click to exit
if (vpDisplay::getClick(Iint, false))
break;
// On the first iteration only: freeze the robot (zero velocity) so the
// user can inspect the initial view before the servo effectively starts.
if (start) {
start = false;
v = 0;
robot.setVelocity(vpRobot::CAMERA_FRAME, v);
vpDisplay::displayText(Iint, 40, 120, "Click to start the servo...", vpColor::blue);
vpDisplay::flush(Iint);
// vpDisplay::getClick(Iint);
}
// Pace the loop to the robot sampling time (seconds -> milliseconds).
vpTime::wait(1000 * robot.getSamplingTime());
}
}
catch (const vpException &e) {
std::cout << "Catch an exception: " << e << std::endl;
}
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
if (display != nullptr) {
delete display;
}
#endif
#endif
}