/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 *   servoViper850FourPoints2DCamVelocityLs_cur.cpp
 *   Tests the control law:
 *   - eye-in-hand control
 *   - velocity computed in the camera frame
 *
 *****************************************************************************/
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace

#include <fstream>
#include <iostream>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>

#if (defined(VISP_HAVE_VIPER850) && defined(VISP_HAVE_DC1394))

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpRxyzVector.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpRobotViper850.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#define L 0.05 // to deal with a 10cm by 10cm square
78
100void compute_pose(vpPoint point[], vpDot2 dot[], int ndot, vpCameraParameters cam, vpHomogeneousMatrix &cMo, bool init)
101{
103 vpPose pose;
104 vpImagePoint cog;
105 for (int i = 0; i < ndot; i++) {
106
107 double x = 0, y = 0;
108 cog = dot[i].getCog();
110 y); // pixel to meter conversion
111 point[i].set_x(x); // projection perspective p
112 point[i].set_y(y);
113 pose.addPoint(point[i]);
114 }
115
116 if (init == true) {
118 } else { // init = false; use of the previous pose to initialise VIRTUAL_VS
120 }
121}
122
123int main()
124{
125 // Log file creation in /tmp/$USERNAME/log.dat
126 // This file contains by line:
127 // - the 6 computed joint velocities (m/s, rad/s) to achieve the task
128 // - the 6 mesured joint velocities (m/s, rad/s)
129 // - the 6 mesured joint positions (m, rad)
130 // - the 8 values of s - s*
131 std::string username;
132 // Get the user login name
133 vpIoTools::getUserName(username);
134
135 // Create a log filename to save velocities...
136 std::string logdirname;
137 logdirname = "/tmp/" + username;
138
139 // Test if the output path exist. If no try to create it
140 if (vpIoTools::checkDirectory(logdirname) == false) {
141 try {
142 // Create the dirname
143 vpIoTools::makeDirectory(logdirname);
144 } catch (...) {
145 std::cerr << std::endl << "ERROR:" << std::endl;
146 std::cerr << " Cannot create " << logdirname << std::endl;
147 return EXIT_FAILURE;
148 }
149 }
150 std::string logfilename;
151 logfilename = logdirname + "/log.dat";
152
153 // Open the log file name
154 std::ofstream flog(logfilename.c_str());
155
156 try {
157 vpRobotViper850 robot;
158 // Load the end-effector to camera frame transformation obtained
159 // using a camera intrinsic model with distortion
162
163 vpServo task;
164
166 int i;
167
168 bool reset = false;
169 vp1394TwoGrabber g(reset);
171 g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
172 g.open(I);
173
174 g.acquire(I);
175
176#ifdef VISP_HAVE_X11
177 vpDisplayX display(I, 100, 100, "Current image");
178#elif defined(HAVE_OPENCV_HIGHGUI)
179 vpDisplayOpenCV display(I, 100, 100, "Current image");
180#elif defined(VISP_HAVE_GTK)
181 vpDisplayGTK display(I, 100, 100, "Current image");
182#endif
183
186
187 std::cout << std::endl;
188 std::cout << "-------------------------------------------------------" << std::endl;
189 std::cout << " Test program for vpServo " << std::endl;
190 std::cout << " Eye-in-hand task control, velocity computed in the camera space" << std::endl;
191 std::cout << " Use of the Viper850 robot " << std::endl;
192 std::cout << " task : servo 4 points on a square with dimention " << L << " meters" << std::endl;
193 std::cout << "-------------------------------------------------------" << std::endl;
194 std::cout << std::endl;
195
196 vpDot2 dot[4];
197 vpImagePoint cog;
198
199 std::cout << "Click on the 4 dots clockwise starting from upper/left dot..." << std::endl;
200
201 for (i = 0; i < 4; i++) {
202 dot[i].setGraphics(true);
203 dot[i].initTracking(I);
204 cog = dot[i].getCog();
207 }
208
210
211 // Update camera parameters
212 robot.getCameraParameters(cam, I);
213
214 cam.printParameters();
215
216 // Sets the current position of the visual feature
217 vpFeaturePoint p[4];
218 for (i = 0; i < 4; i++)
219 vpFeatureBuilder::create(p[i], cam, dot[i]); // retrieve x,y of the vpFeaturePoint structure
220
221 // Set the position of the square target in a frame which origin is
222 // centered in the middle of the square
223 vpPoint point[4];
224 point[0].setWorldCoordinates(-L, -L, 0);
225 point[1].setWorldCoordinates(L, -L, 0);
226 point[2].setWorldCoordinates(L, L, 0);
227 point[3].setWorldCoordinates(-L, L, 0);
228
229 // Initialise a desired pose to compute s*, the desired 2D point features
231 vpTranslationVector cto(0, 0, 0.5); // tz = 0.5 meter
233 vpRotationMatrix cRo(cro); // Build the rotation matrix
234 cMo.buildFrom(cto, cRo); // Build the homogeneous matrix
235
236 // Sets the desired position of the 2D visual feature
237 vpFeaturePoint pd[4];
238 // Compute the desired position of the features from the desired pose
239 for (int i = 0; i < 4; i++) {
240 vpColVector cP, p;
241 point[i].changeFrame(cMo, cP);
242 point[i].projection(cP, p);
243
244 pd[i].set_x(p[0]);
245 pd[i].set_y(p[1]);
246 pd[i].set_Z(cP[2]);
247 }
248
249 // We want to see a point on a point
250 for (i = 0; i < 4; i++)
251 task.addFeature(p[i], pd[i]);
252
253 // Set the proportional gain
254 task.setLambda(0.3);
255
256 // Display task information
257 task.print();
258
259 // Define the task
260 // - we want an eye-in-hand control law
261 // - articular velocity are computed
264 task.print();
265
266 // Initialise the velocity control of the robot
268
269 bool init_pose_from_linear_method = true;
270 std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
271 for (;;) {
272 // Acquire a new image from the camera
273 g.acquire(I);
274
275 // Display this image
277
278 try {
279 // For each point...
280 for (i = 0; i < 4; i++) {
281 // Achieve the tracking of the dot in the image
282 dot[i].track(I);
283 // Display a green cross at the center of gravity position in the
284 // image
285 cog = dot[i].getCog();
287 }
288 } catch (...) {
289 flog.close(); // Close the log file
290 vpTRACE("Error detected while tracking visual features");
291 robot.stopMotion();
292 return EXIT_FAILURE;
293 }
294
295 // At first iteration, we initialise non linear pose estimation with a linear approach.
296 // For the other iterations, non linear pose estimation is initialized with the pose estimated at previous
297 // iteration of the loop
298 compute_pose(point, dot, 4, cam, cMo, init_pose_from_linear_method);
299 if (init_pose_from_linear_method) {
300 init_pose_from_linear_method = false;
301 }
302
303 for (i = 0; i < 4; i++) {
304 // Update the point feature from the dot location
305 vpFeatureBuilder::create(p[i], cam, dot[i]);
306 // Set the feature Z coordinate from the pose
307 vpColVector cP;
308 point[i].changeFrame(cMo, cP);
309
310 p[i].set_Z(cP[2]);
311 }
312
313 vpColVector v;
314 // Compute the visual servoing skew vector
315 v = task.computeControlLaw();
316
317 // Display the current and desired feature points in the image display
318 vpServoDisplay::display(task, cam, I);
319
320 // Apply the computed joint velocities to the robot
322
323 // Save velocities applied to the robot in the log file
324 // v[0], v[1], v[2] correspond to joint translation velocities in m/s
325 // v[3], v[4], v[5] correspond to joint rotation velocities in rad/s
326 flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";
327
328 // Get the measured joint velocities of the robot
329 vpColVector qvel;
331 // Save measured joint velocities of the robot in the log file:
332 // - qvel[0], qvel[1], qvel[2] correspond to measured joint translation
333 // velocities in m/s
334 // - qvel[3], qvel[4], qvel[5] correspond to measured joint rotation
335 // velocities in rad/s
336 flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " " << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";
337
338 // Get the measured joint positions of the robot
339 vpColVector q;
340 robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
341 // Save measured joint positions of the robot in the log file
342 // - q[0], q[1], q[2] correspond to measured joint translation
343 // positions in m
344 // - q[3], q[4], q[5] correspond to measured joint rotation
345 // positions in rad
346 flog << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << " " << q[4] << " " << q[5] << " ";
347
348 // Save feature error (s-s*) for the 4 feature points. For each feature
349 // point, we have 2 errors (along x and y axis). This error is
350 // expressed in meters in the camera frame
351 flog << task.getError() << std::endl;
352
353 // Flush the display
355
356 // std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<
357 // std::endl;
358 }
359
360 std::cout << "Display task information: " << std::endl;
361 task.print();
362 flog.close(); // Close the log file
363 return EXIT_SUCCESS;
364 } catch (const vpException &e) {
365 flog.close(); // Close the log file
366 std::cout << "Catch an exception: " << e.getMessage() << std::endl;
367 return EXIT_FAILURE;
368 }
369}
370
371#else
// Fallback build when the Viper850 robot and/or libdc1394 are not available:
// tell the user why the demo cannot run instead of failing silently.
int main()
{
  std::cout << "You do not have a Viper 850 robot connected to your computer..." << std::endl;
  return EXIT_SUCCESS;
}
377#endif
/*
 * NOTE(review): The text below is the Doxygen cross-reference index that the
 * HTML extraction of this example appended to the source. It is not part of
 * the program; it is kept (commented out) because it documents the API calls
 * used by the example.
 *
 * Class for firewire ieee1394 video devices using libdc1394-2.x api.
 * Generic class defining intrinsic camera parameters.
 * @ perspectiveProjWithDistortion : Perspective projection with distortion model.
 * Implementation of column vector and the associated operations.
 * static const vpColor blue   (vpColor.h:217)
 * static const vpColor green  (vpColor.h:214)
 * vpDisplayGTK: display image using the GTK 3rd party library.
 * vpDisplayOpenCV: display image using the OpenCV library.
 * vpDisplayX: use the X11 console to display images on unix-like OS. (vpDisplayX.h:132)
 * static void display(const vpImage< unsigned char > &I)
 * static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
 * static void flush(const vpImage< unsigned char > &I)
 * vpDot2: tracker for a blob (connex pixels with same gray level) on a vpImage. (vpDot2.h:124)
 *   void track(const vpImage< unsigned char > &I, bool canMakeTheWindowGrow=true)  (vpDot2.cpp:441)
 *   void setGraphics(bool activate)                                                (vpDot2.h:311)
 *   vpImagePoint getCog() const                                                    (vpDot2.h:177)
 *   void initTracking(const vpImage< unsigned char > &I, unsigned int size=0)      (vpDot2.cpp:252)
 * vpException: error that can be emitted by ViSP classes. (vpException.h:59)
 *   const char * getMessage() const
 * vpFeatureBuilder: static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
 * vpFeaturePoint: 2D point visual feature composed of cartesian coordinates.
 *   void set_y(double y) / void set_x(double x) / void set_Z(double Z)
 * vpHomogeneousMatrix: homogeneous matrix and operations on such matrices.
 *   void buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)
 * vpImagePoint: 2D point in an image.
 * vpImage (vpImage.h:135)
 * vpIoTools: static bool checkDirectory(const std::string &dirname)
 *            static std::string getUserName()
 *            static void makeDirectory(const std::string &dirname)
 * vpMath: static double rad(double deg) (vpMath.h:116)
 * vpPixelMeterConversion: static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
 * vpPoint: 3D point in the object frame with forward projection. (vpPoint.h:77)
 *   void set_x(double x)  (vpPoint.cpp:508)
 *   void projection(const vpColVector &_cP, vpColVector &_p) const (vpPoint.cpp:219)
 *   void changeFrame(const vpHomogeneousMatrix &cMo, vpColVector &cP) const (vpPoint.cpp:236)
 *   void setWorldCoordinates(double oX, double oY, double oZ) (vpPoint.cpp:110)
 *   void set_y(double y)  (vpPoint.cpp:510)
 * vpPose: pose computation from N points. (vpPose.h:81)
 *   void addPoint(const vpPoint &P) (vpPose.cpp:140)
 *   @ DEMENTHON_LAGRANGE_VIRTUAL_VS (vpPose.h:102)
 *   @ VIRTUAL_VS (vpPose.h:96)
 *   bool computePose(vpPoseMethodType method, vpHomogeneousMatrix &cMo, bool(*func)(const vpHomogeneousMatrix &)=NULL) (vpPose.cpp:469)
 * vpRobot: void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel)
 *          void getVelocity(const vpRobot::vpControlFrameType frame, vpColVector &velocity)
 *   @ ARTICULAR_FRAME (vpRobot.h:76)
 *   @ CAMERA_FRAME (vpRobot.h:80)
 *   @ STATE_VELOCITY_CONTROL : initialize the velocity controller (vpRobot.h:64)
 *   virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState) (vpRobot.cpp:198)
 * vpRotationMatrix: rotation matrix and operations on such matrices.
 * vpRxyzVector: rotation vector as Euler angle minimal representation.
 * vpServoDisplay: static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
 * vpServo: void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE) (vpServo.cpp:564)
 *   @ EYEINHAND_CAMERA (vpServo.h:151)
 *   void print(const vpServo::vpServoPrintType display_level=ALL, std::ostream &os=std::cout) (vpServo.cpp:299)
 *   void setLambda(double c) (vpServo.h:403)
 *   void setServo(const vpServoType &servo_type) (vpServo.cpp:210)
 *   vpColVector getError() const (vpServo.h:276)
 *   @ PSEUDO_INVERSE (vpServo.h:199)
 *   vpColVector computeControlLaw() (vpServo.cpp:930)
 *   @ CURRENT (vpServo.h:179)
 *   void addFeature(vpBasicFeature &s, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL) (vpServo.cpp:487)
 * vpTranslationVector: class that considers the case of a translation vector.
 * @ TOOL_PTGREY_FLEA2_CAMERA (vpViper850.h:127)
 * #define vpTRACE (vpDebug.h:411)
 */