UsTK : Ultrasound ToolKit version 2.0.1 under development (2025-01-22)
tutorial-servo-target.cpp
#include <iostream>

#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpMutex.h>
#include <visp3/core/vpThread.h>
#include <visp3/core/vpTime.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/robot/vpRobotViper850.h>
#include <visp3/sensor/vpV4l2Grabber.h>

#include <visp3/ustk_core/usImagePostScan2D.h>
#include <visp3/ustk_core/usPixelMeterConversion.h>

#include <visp3/ustk_template_tracking/usDenseTracker2D.h>

#if defined(VISP_HAVE_V4L2) && defined(VISP_HAVE_PTHREAD) && defined(VISP_HAVE_VIPER850)

// Shared vars
typedef enum { capture_waiting, capture_started, capture_stopped } t_CaptureState;
t_CaptureState s_capture_state = capture_waiting;
vpImage<unsigned char> s_frame;
vpMutex s_mutex_capture;

bool s_robotIsRunning;
vpMutex s_mutex_control_velocity;
vpColVector s_controlVelocity(6, 0.);
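
// The tutorial runs three threads: captureFunction() grabs the ultrasound
// stream, displayFunction() tracks the target and computes the visual
// servoing velocity, and controlFunction() combines that velocity with a
// force control law to drive the Viper 850. The threads exchange the latest
// frame and the control velocity through the shared variables above, each
// access being guarded by the corresponding mutex.
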
vpThread::Return captureFunction(vpThread::Args args)
{
  vpV4l2Grabber cap = *((vpV4l2Grabber *)args);
  vpImage<unsigned char> frame_;
  bool stop_capture_ = false;

  cap.open(frame_);

  vpRect roi(vpImagePoint(55, 70), vpImagePoint(410, 555)); // roi to remove Sonosite banners

  while (!stop_capture_) {
    // Capture in progress
    cap.acquire(frame_, roi); // get a new frame from the camera

    // Update shared data
    {
      vpMutex::vpScopedLock lock(s_mutex_capture);
      if (s_capture_state == capture_stopped)
        stop_capture_ = true;
      else
        s_capture_state = capture_started;
      s_frame = frame_;
    }
  }

  {
    vpMutex::vpScopedLock lock(s_mutex_capture);
    s_capture_state = capture_stopped;
  }
  std::cout << "End of capture thread" << std::endl;
  return 0;
}

vpThread::Return displayFunction(vpThread::Args args)
{
  (void)args; // Avoid warning: unused parameter args
  usImagePostScan2D<unsigned char> postScan_;
  usDenseTracker2D tracker;
  vpRectOriented rectangle;

  postScan_.setTransducerRadius(0.060);
  postScan_.setTransducerConvexity(true);
  postScan_.setScanLineNumber(128);
  postScan_.setFieldOfView(vpMath::rad(57.0)); // field of view is 57 deg
  postScan_.setDepth(0.12);
  postScan_.setProbeName("Sonosite C60");
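
  // These settings describe the convex transducer used for the acquisition
  // (60 mm radius, 128 scan lines, 57 deg field of view, 12 cm depth). They
  // are required by usPixelMeterConversion below to relate pixel coordinates
  // to metric coordinates in the image.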

  // convert(const usImagePostScan2D<unsigned char> &image, const double &u, const double &v, double &x, double &y);

  t_CaptureState capture_state_;
  bool display_initialized_ = false;

#if defined(VISP_HAVE_X11)
  vpDisplayX *dpost_scan_ = NULL; // display post-scan image
#endif

  vpPlot plot(1);
  plot.initGraph(0, 1);
  plot.initRange(0, 0, 10, -0.01, 0.01);
  plot.setTitle(0, "X target error");
  plot.setUnitY(0, "error (meters)");
  plot.setLegend(0, 0, "time");

  double xtarget;
  double ytarget;

  bool firstLoopCycle = true;

  double startTime = vpTime::measureTimeMs();

  do {
    s_mutex_capture.lock();
    capture_state_ = s_capture_state;
    s_mutex_capture.unlock();

    // Check if a frame is available
    if (capture_state_ == capture_started) {
      // Create a copy of the captured frame
      {
        vpMutex::vpScopedLock lock(s_mutex_capture);
        postScan_.setData(s_frame);
        postScan_.setHeightResolution(
            (postScan_.getDepth() + postScan_.getTransducerRadius() * (1 - cos(postScan_.getFieldOfView() / 2.0))) /
            postScan_.getHeight());
        postScan_.setWidthResolution(
            (postScan_.getDepth() + postScan_.getTransducerRadius() * (1 - cos(postScan_.getFieldOfView() / 2.0))) /
            postScan_.getHeight());
      }
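
      // For a convex probe the image spans a physical height of
      // depth + R * (1 - cos(FOV / 2)), i.e. the scanning depth plus the
      // sagitta of the transducer arc. Dividing by the image height in pixels
      // gives a resolution in meters per pixel. The same value is reused for
      // the width resolution, which amounts to assuming square pixels.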

      // Initialize the tracker on the first frame, with a 50 x 30 pixel
      // region centered in the image
      if (firstLoopCycle) {
        rectangle.setCenter(vpImagePoint(postScan_.getHeight() / 2, postScan_.getWidth() / 2));
        rectangle.setSize(50, 30);
        rectangle.setOrientation(0);
        tracker.init(postScan_, rectangle);
      }

      tracker.update(postScan_);
      rectangle = tracker.getTarget();

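      // Convert the pixel coordinates of the tracked region center
      // (u = column = get_j(), v = row = get_i()) into the metric coordinates
      // (x, y) of the target in the image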
      usPixelMeterConversion::convert(postScan_, rectangle.getCenter().get_j(), rectangle.getCenter().get_i(),
                                      xtarget, ytarget);

      std::cout << "Height resolution: " << postScan_.getHeightResolution() << std::endl;
      std::cout << "Width resolution: " << postScan_.getWidthResolution() << std::endl;
      std::cout << "Center i = " << rectangle.getCenter().get_i() << ", j = " << rectangle.getCenter().get_j()
                << std::endl;
      std::cout << "xtarget = " << xtarget << ", ytarget = " << ytarget << std::endl;

      double time = (vpTime::measureTimeMs() - startTime) / 1000.0;

      plot.plot(0, 0, time, xtarget);

      {
        vpMutex::vpScopedLock lock(s_mutex_control_velocity);
        s_controlVelocity = 0.0;
        s_controlVelocity[1] = -1 * xtarget;
      }
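
      // Visual servoing law: a proportional controller that moves the probe
      // along its lateral axis so as to bring the lateral error xtarget back
      // to zero. The control thread reads this velocity and combines it with
      // the force control law.
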
      // Check if we need to initialize the display with the first frame
      if (!display_initialized_) {
// Initialize the display
#if defined(VISP_HAVE_X11)
        unsigned int xpos = 10;
        dpost_scan_ = new vpDisplayX(postScan_, xpos, 10, "post-scan");
        display_initialized_ = true;
#endif
      }

      // Display the image
      vpDisplay::display(postScan_);

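      // Draw the four edges of the oriented rectangle returned by the tracker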
      vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getTopLeft().get_i()),
                             static_cast<int>(rectangle.getTopLeft().get_j()),
                             static_cast<int>(rectangle.getBottomLeft().get_i()),
                             static_cast<int>(rectangle.getBottomLeft().get_j()), vpColor::red);
      vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getBottomLeft().get_i()),
                             static_cast<int>(rectangle.getBottomLeft().get_j()),
                             static_cast<int>(rectangle.getBottomRight().get_i()),
                             static_cast<int>(rectangle.getBottomRight().get_j()), vpColor::red);
      vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getBottomRight().get_i()),
                             static_cast<int>(rectangle.getBottomRight().get_j()),
                             static_cast<int>(rectangle.getTopRight().get_i()),
                             static_cast<int>(rectangle.getTopRight().get_j()), vpColor::red);
      vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getTopRight().get_i()),
                             static_cast<int>(rectangle.getTopRight().get_j()),
                             static_cast<int>(rectangle.getTopLeft().get_i()),
                             static_cast<int>(rectangle.getTopLeft().get_j()), vpColor::red);

      // Trigger end of acquisition with a mouse click
      vpDisplay::displayText(postScan_, 10, 10, "Click to exit...", vpColor::red);
      if (vpDisplay::getClick(postScan_, false)) {
        vpMutex::vpScopedLock lock(s_mutex_capture);
        s_capture_state = capture_stopped;
      }

      // Update the display
      vpDisplay::flush(postScan_);
      firstLoopCycle = false;
    } else {
      vpTime::wait(2); // Sleep 2 ms
    }
  } while (capture_state_ != capture_stopped);

#if defined(VISP_HAVE_X11)
  delete dpost_scan_;
#endif

  std::cout << "End of display thread" << std::endl;
  return 0;
}

vpThread::Return controlFunction(vpThread::Args args)
{
  (void)args;
  vpRobotViper850 robot;

  vpMatrix eJe; // robot jacobian

  // Transformation from the end-effector frame to the force/torque sensor
  // frame. Note that the end-effector frame is located on the lower part of
  // the male component of the tool changer.
  vpHomogeneousMatrix eMs;
  eMs[2][3] = -0.062; // tz = -6.2 cm

  // Transformation from the force/torque sensor frame to the probe frame,
  // from which we want to control the robot
  vpHomogeneousMatrix sMp;

  // Transformation from the force/torque sensor frame to the end-effector frame
  vpHomogeneousMatrix sMe;
  eMs.inverse(sMe);

  // Build the twist matrix that converts a velocity expressed in the
  // end-effector frame into the force/torque sensor frame
  vpVelocityTwistMatrix sVe;
  sVe.buildFrom(sMe);

  vpColVector sHs(6);      // force/torque sensor measures
  vpColVector sHs_star(6); // desired force/torque values in the sensor frame
  vpColVector pHp_star(6); // desired force/torque values in the probe frame
  vpColVector gHg(6);      // force/torque due to gravity
  vpMatrix lambda(6, 6);
  // Position of the cog of the object attached after the sensor, in the sensor frame
  vpTranslationVector stg;
  vpColVector sHs_bias(6); // force/torque sensor measures used as bias

  // Cartesian velocities corresponding to the force/torque control, in the
  // sensor frame
  vpColVector v_s(6);
  // Joint velocities corresponding to the force/torque control
  vpColVector q_dot(6);

  // Initialize the force gain
  lambda = 0;
  for (int i = 0; i < 3; i++)
    lambda[i][i] = 0.02 / 6;
  // Initialize the torque gain
  for (int i = 3; i < 6; i++)
    lambda[i][i] = 1. / 2;
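
  // lambda maps a wrench error onto a Cartesian velocity, so the diagonal
  // gains are expressed in (m/s)/N for the force part and in (rad/s)/(N.m)
  // for the torque part; small values keep the probe motion gentle while in
  // contact.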

  // Initialize the desired force/torque values
  pHp_star = 0;
  pHp_star[2] = 3; // Fz = 3N
  //
  // Case of the C65 US probe
  //
  // Set the probe frame control
  sMp[2][3] = 0.262; // tz = 26.2 cm

  // Initialize the force/torque due to gravity
  gHg[2] = -(0.696 + 0.476) * 9.81; // m*g

  // Position of the cog of the object attached after the sensor, in the sensor frame
  stg[0] = 0;
  stg[1] = 0;
  stg[2] = 0.088; // tz = 8.8 cm

  vpRotationMatrix sRp;
  sMp.extract(sRp);
  vpTranslationVector stp;
  sMp.extract(stp);

  vpHomogeneousMatrix eMp = eMs * sMp;
  vpVelocityTwistMatrix eVp(eMp);

  // Get the position of the end-effector in the reference frame
  vpColVector q;
  vpHomogeneousMatrix fMe;
  vpHomogeneousMatrix fMs;
  vpRotationMatrix sRf;
  robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
  robot.get_fMe(q, fMe);
  // Compute the position of the sensor frame in the reference frame
  fMs = fMe * eMs;
  vpHomogeneousMatrix sMf;
  fMs.inverse(sMf);
  sMf.extract(sRf);

  // Build the force/torque twist matrix that brings the forces due to
  // gravity into the sensor frame
  vpForceTwistMatrix sFg(sMf); // Only the rotation part is considered
  // Modify the translational part
  for (int i = 0; i < 3; i++)
    for (int j = 0; j < 3; j++)
      sFg[i + 3][j] = (stg.skew() * sRf)[i][j];
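
  // With this translational part, sFg implements the wrench transformation
  //   sFg = [ sRf          0   ]
  //         [ [stg]x sRf   sRf ]
  // so that the gravity wrench gHg, applied at the cog and expressed in the
  // reference frame, is expressed at the origin of the sensor frame.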

  // Build the force/torque twist matrix that brings a wrench expressed in
  // the probe frame into the sensor frame
  vpForceTwistMatrix sFp(sMp);

  // Bias the force/torque sensor
  std::cout << "\nBias the force/torque sensor...\n " << std::endl;
  robot.biasForceTorqueSensor();

  // Set the robot to velocity control
  robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

  int iter = 0;
  t_CaptureState capture_state_;

  std::cout << "Starting control loop..." << std::endl;
  do {
    s_mutex_capture.lock();
    capture_state_ = s_capture_state;
    s_mutex_capture.unlock();

    // Check if a frame is available
    if (capture_state_ == capture_started) {

      // Get the force/torque measures from the sensor
      sHs = robot.getForceTorque();

      // Multiply the measures by -1 to get the force/torque exerted by the
      // robot on the environment
      sHs *= -1;

      // Update the gravity transformation matrix
      robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
      robot.get_fMe(q, fMe);
      // Compute the position of the sensor frame in the reference frame
      fMs = fMe * eMs;
      // Compute the inverse transformation
      fMs.inverse(sMf);
      sMf.extract(sRf);
      // Update the twist matrix that brings the forces due to gravity
      // into the sensor frame
      sFg.buildFrom(sMf); // Only the rotation part is considered
      // Modify the translational part
      for (int i = 0; i < 3; i++)
        for (int j = 0; j < 3; j++)
          sFg[i + 3][j] = (stg.skew() * sRf)[i][j];

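      // On the first iteration, record the sensor offset: whatever remains of
      // the measure once gravity is compensated is a constant bias that is
      // subtracted from all subsequent measurements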
      if (iter == 0) {
        sHs_bias = sHs - sFg * gHg;
      }

      // Get the visual servoing velocity computed by the display thread
      vpColVector v_p;
      {
        vpMutex::vpScopedLock lock(s_mutex_control_velocity);
        v_p = s_controlVelocity;
      }

      // Keep only its lateral component
      v_p[0] = 0;
      v_p[2] = 0;
      v_p[3] = 0;
      v_p[4] = 0;
      v_p[5] = 0;

      // Compute the force/torque control law in the sensor frame
      v_s = lambda * (sFp * pHp_star - (sHs - sFg * gHg - sHs_bias));

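      // The error term is the desired wrench (brought from the probe frame
      // into the sensor frame by sFp) minus the measured wrench compensated
      // for gravity and bias. Only the translational z component, i.e. the
      // 3 N contact force, is kept below; the lateral motion comes from the
      // visual servoing term eVp * v_p.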
      v_s[0] = 0.0;
      v_s[1] = 0.0;
      v_s[3] = 0.0;
      v_s[4] = 0.0;
      v_s[5] = 0.0;

      vpVelocityTwistMatrix eVs;
      sVe.inverse(eVs);

      // Express both control components in the end-effector frame
      vpColVector v_e = eVs * v_s + eVp * v_p;

      // Get the robot jacobian eJe
      robot.get_eJe(eJe);

      // Compute the joint velocities to achieve the force/torque control
      q_dot = eJe.pseudoInverse() * v_e;

      // Send the joint velocities to the robot
      robot.setVelocity(vpRobot::ARTICULAR_FRAME, q_dot);

      iter++;
    }
    vpTime::wait(1);
  } while (capture_state_ != capture_stopped);

  std::cout << "End of control thread" << std::endl;
  return 0;
}

int main(int argc, const char *argv[])
{
  unsigned int opt_input = 1; // Default value is 1 to match the material in the lab

  // Command line options
  for (int i = 0; i < argc; i++) {
    if (std::string(argv[i]) == "--input" && i + 1 < argc)
      opt_input = (unsigned int)atoi(argv[i + 1]);
    else if (std::string(argv[i]) == "--help") {
      std::cout << "Usage: " << argv[0] << " [--input <number>] [--help]" << std::endl;
      return 0;
    }
  }

  // Instantiate the grabber
  vpV4l2Grabber g;

  g.setInput(opt_input);
  g.setScale(1);
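
  // The grabber digitizes the video output of the ultrasound scanner through
  // a V4L2 device; --input selects the video input channel and a scale of 1
  // keeps the full acquisition resolution.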

  // init
  s_robotIsRunning = false;
  s_controlVelocity = vpColVector(6, 0);

  // Start the threads
  vpThread thread_capture((vpThread::Fn)captureFunction, (vpThread::Args)&g);
  vpThread thread_display((vpThread::Fn)displayFunction);
  vpThread thread_control((vpThread::Fn)controlFunction);

  // Wait until the threads end
  thread_capture.join();
  thread_display.join();
  thread_control.join();

  return 0;
}

#else
int main()
{
#ifndef VISP_HAVE_V4L2
  std::cout << "You should enable V4L2 to make this example work..." << std::endl;
#elif !defined(_WIN32) && (defined(__unix__) || defined(__unix) || (defined(__APPLE__) && defined(__MACH__))) // UNIX
  std::cout << "You should enable pthread usage and rebuild ViSP..." << std::endl;
#else
  std::cout << "Multi-threading seems not supported on this platform" << std::endl;
#endif
#ifndef VISP_HAVE_VIPER850
  std::cout << "You need the Viper 850 robot to run this example" << std::endl;
#endif
}

#endif