Visual Servoing Platform version 3.5.0
servoBebop2.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Example that shows how to do visual servoing with Parrot Bebop 2 drone in ViSP.
 *
 * Authors:
 * Fabien Spindler
 * Gatien Gaumerais
 *
 *****************************************************************************/

#include <algorithm>
#include <iostream>

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpMomentAreaNormalized.h>
#include <visp3/core/vpMomentBasic.h>
#include <visp3/core/vpMomentCentered.h>
#include <visp3/core/vpMomentDatabase.h>
#include <visp3/core/vpMomentGravityCenter.h>
#include <visp3/core/vpMomentGravityCenterNormalized.h>
#include <visp3/core/vpMomentObject.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpTime.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/robot/vpRobotBebop2.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
#include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
#include <visp3/visual_features/vpFeatureVanishingPoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#if !defined(VISP_HAVE_ARSDK)
int main()
{
  std::cout << "\nThis example requires Parrot ARSDK3 library. You should install it.\n" << std::endl;
  return EXIT_SUCCESS;
}
#elif !defined(VISP_HAVE_FFMPEG)
int main()
{
  std::cout << "\nThis example requires ffmpeg library. You should install it.\n" << std::endl;
  return EXIT_SUCCESS;
}
#elif (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
int main()
{
  std::cout << "\nThis example requires cxx11 standard or higher. Turn it on using cmake -DUSE_CXX_STANDARD=11.\n" << std::endl;
  return EXIT_SUCCESS;
}
#else

// Comparator for std::sort: orders image points by increasing v (row)
// coordinate, i.e. from the top of the image to the bottom.
bool compareImagePoint(std::pair<size_t, vpImagePoint> p1, std::pair<size_t, vpImagePoint> p2)
{
  return (p1.second.get_v() < p2.second.get_v());
}

int main(int argc, char **argv)
{
  try {

    std::string ip_address = "192.168.42.1";
    std::string opt_cam_parameters;
    bool opt_has_cam_parameters = false;

    double tagSize = -1;

    double opt_distance_to_tag = -1;
    bool opt_has_distance_to_tag = false;

    int stream_res = 0; // Default 480p resolution

    bool verbose = false;

    if (argc >= 3 && std::string(argv[1]) == "--tag_size") {
      tagSize = std::atof(argv[2]); // Tag size option is required
      if (tagSize <= 0) {
        std::cout << "Error: invalid tag size." << std::endl << "See " << argv[0] << " --help" << std::endl;
        return 0;
      }
      for (int i = 3; i < argc; i++) {
        if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
          ip_address = std::string(argv[i + 1]);
          i++;
        } else if (std::string(argv[i]) == "--distance_to_tag" && i + 1 < argc) {
          opt_distance_to_tag = std::atof(argv[i + 1]);
          if (opt_distance_to_tag <= 0) {
            std::cout << "Error: invalid distance to tag." << std::endl << "See " << argv[0] << " --help" << std::endl;
            return 0;
          }
          opt_has_distance_to_tag = true;
          i++;
        } else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
          opt_cam_parameters = std::string(argv[i + 1]);
          opt_has_cam_parameters = true;
          i++;
        } else if (std::string(argv[i]) == "--hd_stream") {
          stream_res = 1;
        } else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
          verbose = true;
        } else {
          std::cout << "Error: unknown parameter " << argv[i] << std::endl
                    << "See " << argv[0] << " --help" << std::endl;
          return 0;
        }
      }
    } else if (argc >= 2 && (std::string(argv[1]) == "--help" || std::string(argv[1]) == "-h")) {
      std::cout << "\nUsage:\n"
                << "  " << argv[0]
                << " [--tag_size <size>] [--ip <drone ip>] [--distance_to_tag <distance>] [--intrinsic <xml file>] "
                << "[--hd_stream] [--verbose] [-v] [--help] [-h]\n"
                << std::endl
                << "Description:\n"
                << "  --tag_size <size>\n"
                << "      The size of the tag to detect in meters, required.\n\n"
                << "  --ip <drone ip>\n"
                << "      IP address of the drone to which you want to connect (default: 192.168.42.1).\n\n"
                << "  --distance_to_tag <distance>\n"
                << "      The desired distance to the tag in meters (default: 1 meter).\n\n"
                << "  --intrinsic <xml file>\n"
                << "      XML file containing computed intrinsic camera parameters (default: empty).\n\n"
                << "  --hd_stream\n"
                << "      Enables HD 720p streaming instead of default 480p.\n"
                << "      Improves range and accuracy of the tag detection,\n"
                << "      but increases latency and computation time.\n"
                << "      Caution: camera calibration settings are different for the two resolutions.\n"
                << "      Make sure that if you pass custom intrinsic camera parameters,\n"
                << "      they were obtained with the correct resolution.\n\n"
                << "  --verbose, -v\n"
                << "      Enables verbose mode (drone information messages and velocity commands\n"
                << "      are then displayed).\n\n"
                << "  --help, -h\n"
                << "      Print help message.\n"
                << std::endl;
      return 0;

    } else {
      std::cout << "Error: tag size parameter required." << std::endl << "See " << argv[0] << " --help" << std::endl;
      return 0;
    }
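    // Example invocation (the tag size, distance and options shown are
    // illustrative, not prescriptive):
    //   ./servoBebop2 --tag_size 0.14 --distance_to_tag 1.5 --hd_stream --verbose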

    std::cout
        << "\nWARNING:\n"
           " - This program performs no obstacle sensing or avoidance: the drone\n"
           "   WILL collide with any objects in the way! Make sure the drone has\n"
           "   approximately 3 meters of free space on all sides.\n"
           " - The drone uses a downward-facing camera for horizontal speed estimation:\n"
           "   make sure the drone flies above a non-uniform floor,\n"
           "   or its movement will be inaccurate and dangerous!\n"
        << std::endl;

    vpRobotBebop2 drone(
        verbose, true, ip_address); // Create the drone with desired verbose level, settings reset, and corresponding IP

    if (drone.isRunning()) {

      drone.setVideoResolution(stream_res); // Set video resolution to 480p (default) or 720p

      drone.setStreamingMode(0);          // Set lowest latency stream mode
      drone.setVideoStabilisationMode(0); // Disable video stabilisation

      drone.doFlatTrim(); // Flat trim calibration

      drone.startStreaming(); // Start streaming and decoding video data

      drone.setExposure(1.5f); // Set exposure to max so that the AprilTag detection is more efficient

      drone.setCameraOrientation(-5., 0.,
                                 true); // Set camera to look slightly down so that the drone is slightly above the tag

      drone.takeOff(true); // Take off

      vpImage<unsigned char> I;
      drone.getGrayscaleImage(I);

#if defined VISP_HAVE_X11
      vpDisplayX display;
#elif defined VISP_HAVE_GTK
      vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
      vpDisplayGDI display;
#elif defined VISP_HAVE_OPENCV
      vpDisplayOpenCV display;
#endif
      int orig_displayX = 100;
      int orig_displayY = 100;
      display.init(I, orig_displayX, orig_displayY, "DRONE VIEW");
      vpDisplay::display(I);
      vpDisplay::flush(I);

      vpPlot plotter(1, 700, 700, orig_displayX + static_cast<int>(I.getWidth()) + 20, orig_displayY,
                     "Visual servoing tasks");
      unsigned int iter = 0;

      vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
      vpDetectorAprilTag detector(tagFamily); // The detector used to detect AprilTags
      detector.setAprilTagQuadDecimate(4.0);
      detector.setAprilTagNbThreads(4);
      detector.setDisplayTag(true);

      vpCameraParameters cam;

      if (opt_has_cam_parameters) {

        vpXmlParserCamera p;
        vpCameraParameters::vpCameraParametersProjType projModel =
            vpCameraParameters::perspectiveProjWithoutDistortion;

        if (p.parse(cam, opt_cam_parameters, "Camera", projModel, I.getWidth(), I.getHeight()) !=
            vpXmlParserCamera::SEQUENCE_OK) {
          std::cout << "Cannot find parameters in XML file " << opt_cam_parameters << std::endl;
          if (drone.getVideoHeight() == 720) { // 720p streaming
            cam.initPersProjWithoutDistortion(785.6412585, 785.3322447, 637.9049857, 359.7524531);
          } else { // 480p streaming
            cam.initPersProjWithoutDistortion(531.9213063, 520.8495788, 429.133986, 240.9464457);
          }
        }
      } else {
        std::cout << "Setting default camera parameters ... " << std::endl;
        if (drone.getVideoHeight() == 720) { // 720p streaming
          cam.initPersProjWithoutDistortion(785.6412585, 785.3322447, 637.9049857, 359.7524531);
        } else { // 480p streaming
          cam.initPersProjWithoutDistortion(531.9213063, 520.8495788, 429.133986, 240.9464457);
        }
      }
      cam.printParameters();

      vpServo task; // Visual servoing task

      // double lambda = 0.5;
      vpAdaptiveGain lambda = vpAdaptiveGain(1.5, 0.7, 30);
      task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
      task.setInteractionMatrixType(vpServo::CURRENT);
      task.setLambda(lambda);
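      // vpAdaptiveGain(gain_at_zero, gain_at_infinity, slope_at_zero) implements
      //   lambda(||e||) = (l0 - linf) * exp(-l0' * ||e|| / (l0 - linf)) + linf,
      // so the gain rises from 0.7 for large errors to 1.5 near convergence:
      // fast final convergence without over-reacting to a large initial error.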

      /*
        In the following section, camera 1 refers to the camera coordinate system of the drone without taking
        the camera pan and tilt into account; those values are taken into account in camera 2.
        E is the effective coordinate system of the drone, the one in which every velocity command must be
        expressed.

        We can easily compute the homogeneous matrix between camera 1 and camera 2, and between camera 1
        and the effective coordinate system E of the drone.

        Using those matrices, we finally obtain the matrix between c2 and E.
      */
      vpRxyzVector c1_rxyz_c2(vpMath::rad(drone.getCurrentCameraTilt()), vpMath::rad(drone.getCurrentCameraPan()), 0);
      vpRotationMatrix c1Rc2(c1_rxyz_c2);                      // Rotation between camera 1 and 2
      vpHomogeneousMatrix c1Mc2(vpTranslationVector(), c1Rc2); // Homogeneous matrix between c1 and c2

      vpRotationMatrix c1Re{0, 1, 0, 0, 0, 1, 1, 0, 0}; // Rotation between camera 1 and E
      vpTranslationVector c1te(0, 0, -0.09);            // Translation between camera 1 and E
      vpHomogeneousMatrix c1Me(c1te, c1Re);             // Homogeneous matrix between c1 and E

      vpHomogeneousMatrix c2Me = c1Mc2.inverse() * c1Me; // Homogeneous matrix between c2 and E

      vpVelocityTwistMatrix cVe(c2Me);
      task.set_cVe(cVe);
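      // Reading c1Re column by column: the body x axis (forward) is the camera z
      // axis, body y (right) is camera x, and body z (down) is camera y, while c1te
      // places the body frame 9 cm behind the camera along the optical axis. cVe
      // then maps a twist expressed in E into the pan/tilt-corrected camera frame c2.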

      vpMatrix eJe(6, 4, 0);

      eJe[0][0] = 1;
      eJe[1][1] = 1;
      eJe[2][2] = 1;
      eJe[5][3] = 1;
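      // The Bebop 2 is commanded with 4 degrees of freedom (vx, vy, vz, wz in frame
      // E), so the robot Jacobian eJe is 6x4: it lifts the 4-vector computed by the
      // task into a full 6-dof twist, with the roll and pitch rates wx, wy
      // structurally zero.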

      // double Z_d = 1.; // Desired distance to the target
      double Z_d = (opt_has_distance_to_tag ? opt_distance_to_tag : 1.);

      // Define the desired polygon corresponding to the AprilTag, CLOCKWISE
      double X[4] = {tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2.};
      double Y[4] = {tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2.};
      std::vector<vpPoint> vec_P, vec_P_d;

      for (int i = 0; i < 4; i++) {
        vpPoint P_d(X[i], Y[i], 0);
        vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
        P_d.track(cdMo); // Project the corner into the desired camera frame
        vec_P_d.push_back(P_d);
      }
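      // cdMo places the desired camera Z_d meters in front of the tag, centered and
      // fronto-parallel; projecting the four corners through it yields the desired
      // image points from which the desired moments are computed.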
      vpMomentObject m_obj(3), m_obj_d(3);
      vpMomentDatabase mdb, mdb_d;
      vpMomentBasic mb_d;                                // Here only to get the desired area m00
      vpMomentGravityCenter mg, mg_d;
      vpMomentCentered mc, mc_d;
      vpMomentAreaNormalized man(0, Z_d), man_d(0, Z_d); // Declare normalized area; updated below with m00
      vpMomentGravityCenterNormalized mgn, mgn_d;        // Declare normalized gravity center
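      // For this dense polygon object the normalized area feature is
      // an = Z* sqrt(a*/a), with a the current area and a*, Z* the desired area and
      // depth: it provides the depth cue that regulates the distance to the tag.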

      // Desired moments
      m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
      m_obj_d.fromVector(vec_P_d);                    // Initialize the object with the point coordinates

      mb_d.linkTo(mdb_d);       // Add basic moments to database
      mg_d.linkTo(mdb_d);       // Add gravity center to database
      mc_d.linkTo(mdb_d);       // Add centered moments to database
      man_d.linkTo(mdb_d);      // Add area normalized to database
      mgn_d.linkTo(mdb_d);      // Add gravity center normalized to database
      mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
      mg_d.compute();           // Compute gravity center moment
      mc_d.compute();           // Compute centered moments AFTER gravity center

      double area = 0;
      if (m_obj_d.getType() == vpMomentObject::DISCRETE)
        area = mb_d.get(2, 0) + mb_d.get(0, 2);
      else
        area = mb_d.get(0, 0);
      // Update moment with the desired area
      man_d.setDesiredArea(area);

      man_d.compute(); // Compute area normalized moment AFTER centered moments
      mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized moment

      // Desired plane
      double A = 0.0;
      double B = 0.0;
      double C = 1.0 / Z_d;
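      // The moment interaction matrices assume the object lies in the plane
      // 1/Z = A*x + B*y + C; with A = B = 0, every point of the tag is taken at the
      // constant desired depth Z_d.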

      // Construct the normalized moment features
      vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
      vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
      vpFeatureVanishingPoint s_vp, s_vp_d;

      // Add the features to the task
      task.addFeature(s_mgn, s_mgn_d);
      task.addFeature(s_man, s_man_d);
      task.addFeature(s_vp, s_vp_d, vpFeatureVanishingPoint::selectAtanOneOverRho());

      plotter.initGraph(0, 4);
      plotter.setLegend(0, 0, "Xn");          // Distance from center on X axis feature
      plotter.setLegend(0, 1, "Yn");          // Distance from center on Y axis feature
      plotter.setLegend(0, 2, "an");          // Tag area feature
      plotter.setLegend(0, 3, "atan(1/rho)"); // Vanishing point feature

      // Update desired gravity center normalized feature
      s_mgn_d.update(A, B, C);
      s_mgn_d.compute_interaction();
      // Update desired area normalized feature
      s_man_d.update(A, B, C);
      s_man_d.compute_interaction();

      // Update desired vanishing point feature for the horizontal line
      s_vp_d.setAtanOneOverRho(0);
      s_vp_d.setAlpha(0);
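      // atan(1/rho) = 0 means the desired vanishing point is at infinity: the two
      // horizontal edges of the tag must appear parallel in the image, which is the
      // case when the drone faces the tag head-on. This feature drives the yaw wz.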

      bool runLoop = true;
      bool vec_ip_has_been_sorted = false;
      std::vector<std::pair<size_t, vpImagePoint> > vec_ip_sorted;

      //** Visual servoing loop **//
      while (drone.isRunning() && drone.isStreaming() && runLoop) {

        double startTime = vpTime::measureTimeMs();

        drone.getGrayscaleImage(I);
        vpDisplay::display(I);

        std::vector<vpHomogeneousMatrix> cMo_vec;
        detector.detect(I, tagSize, cam, cMo_vec); // Detect AprilTags in current image
        double t = vpTime::measureTimeMs() - startTime;

        {
          std::stringstream ss;
          ss << "Detection time: " << t << " ms";
          vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
        }

        if (detector.getNbObjects() == 1) {

          // Update current points used to compute the moments
          std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
          vec_P.clear();
          for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
            double x = 0, y = 0;
            vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
            vpPoint P;
            P.set_x(x);
            P.set_y(y);
            vec_P.push_back(P);
          }

          // Current moments
          m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
          m_obj.fromVector(vec_P);                      // Initialize the object with the point coordinates

          mg.linkTo(mdb);           // Add gravity center to database
          mc.linkTo(mdb);           // Add centered moments to database
          man.linkTo(mdb);          // Add area normalized to database
          mgn.linkTo(mdb);          // Add gravity center normalized to database
          mdb.updateAll(m_obj);     // All of the moments must be updated, not just an
          mg.compute();             // Compute gravity center moment
          mc.compute();             // Compute centered moments AFTER gravity center
          man.setDesiredArea(area); // Desired area was initialized to 0 (unknown at construction); update it here
          man.compute();            // Compute area normalized moment AFTER centered moments
          mgn.compute();            // Compute gravity center normalized moment AFTER area normalized moment

          s_mgn.update(A, B, C);
          s_mgn.compute_interaction();
          s_man.update(A, B, C);
          s_man.compute_interaction();

          /* Sort the points by their height in the image and keep the original indexes.
             This is done once, so that the detection of the vanishing point does not
             depend on the orientation of the tag. */
          if (!vec_ip_has_been_sorted) {
            for (size_t i = 0; i < vec_ip.size(); i++) {

              // Add the points and their corresponding index
              std::pair<size_t, vpImagePoint> index_pair = std::pair<size_t, vpImagePoint>(i, vec_ip[i]);
              vec_ip_sorted.push_back(index_pair);
            }

            // Sort the points and indexes by the v value of the points
            std::sort(vec_ip_sorted.begin(), vec_ip_sorted.end(), compareImagePoint);

            vec_ip_has_been_sorted = true;
          }

          // Use the two highest points for the first line, and the two others for the second line.
          vpFeatureBuilder::create(s_vp, cam, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first],
                                   vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first],
                                   vpFeatureVanishingPoint::selectAtanOneOverRho());

          task.set_cVe(cVe);
          task.set_eJe(eJe);

          // Compute the control law. Velocities are computed in the mobile robot reference frame
          vpColVector ve = task.computeControlLaw();
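          // With an EYEINHAND_L_cVe_eJe task, ViSP builds the task Jacobian as
          // J = L * cVe * eJe and returns ve = -lambda * J^+ * (s - s*): here a
          // 4-vector (vx, vy, vz, wz) directly expressed in the drone frame E.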

          // Sending the control law to the drone
          if (verbose) {
            std::cout << "ve: " << ve.t() << std::endl;
          }
          drone.setVelocity(ve, 1.0);
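          // The 1.0 is the time in seconds during which the command remains applied;
          // since the loop refreshes it every ~40 ms, motion stays continuous and the
          // drone stops soon after commands cease.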

          for (size_t i = 0; i < 4; i++) {
            vpDisplay::displayCross(I, vec_ip[i], 15, vpColor::red, 1);
            std::stringstream ss;
            ss << i;
            vpDisplay::displayText(I, vec_ip[i] + vpImagePoint(15, 15), ss.str(), vpColor::green);
          }

          // Display visual features
          vpDisplay::displayPolygon(I, vec_ip, vpColor::green, 3); // Current polygon used to compute the moments
          vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
                                  3); // Current tag center of gravity
          vpDisplay::displayLine(I, 0, static_cast<int>(cam.get_u0()), static_cast<int>(I.getHeight()) - 1,
                                 static_cast<int>(cam.get_u0()), vpColor::red,
                                 3); // Vertical line as desired x position
          vpDisplay::displayLine(I, static_cast<int>(cam.get_v0()), 0, static_cast<int>(cam.get_v0()),
                                 static_cast<int>(I.getWidth()) - 1, vpColor::red,
                                 3); // Horizontal line as desired y position

          // Display lines corresponding to the vanishing point for the horizontal lines
          vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first], vpColor::red, 1,
                                 false);
          vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first], vpColor::red, 1,
                                 false);

        } else {
          std::stringstream sserr;
          sserr << "Failed to detect an AprilTag, or detected multiple ones";
          vpDisplay::displayText(I, 120, 20, sserr.str(), vpColor::red);
          vpDisplay::flush(I);
          drone.stopMoving(); // In this case, we stop the drone
        }

        vpDisplay::displayText(I, 10, 10, "Click to exit", vpColor::red);
        vpDisplay::flush(I);
        if (vpDisplay::getClick(I, false)) {
          drone.land();
          runLoop = false;
        }

        plotter.plot(0, iter, task.getError());

        double totalTime = vpTime::measureTimeMs() - startTime;
        std::stringstream sstime;
        sstime << "Total time: " << totalTime << " ms";
        vpDisplay::displayText(I, 80, 20, sstime.str(), vpColor::red);
        vpDisplay::flush(I);

        iter++;
        vpTime::wait(startTime, 40.0); // We wait a total of 40 milliseconds per iteration
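        // vpTime::wait(t0, 40.0) sleeps only for whatever remains of the 40 ms
        // budget after processing, capping the servo loop at roughly 25 Hz.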
      }

      return EXIT_SUCCESS;

    } else {
      std::cout << "ERROR: failed to setup drone control." << std::endl;
      return EXIT_FAILURE;
    }
  } catch (const vpException &e) {
    std::cout << "Caught an exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
}

#endif // VISP_HAVE_ARSDK && VISP_HAVE_FFMPEG && C++11