// UsTK : Ultrasound ToolKit  version 2.0.1 under development (2025-02-01)
// tutorial-ultrasonix-servo-target-confidence.cpp
#include <cmath>
#include <iostream>

#include <visp3/ustk_core/usConfig.h>

#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpMutex.h>
#include <visp3/core/vpThread.h>
#include <visp3/core/vpTime.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/robot/vpRobotViper850.h>

#include <visp3/ustk_core/usImagePostScan2D.h>
#include <visp3/ustk_core/usImagePreScan2D.h>
#include <visp3/ustk_core/usPixelMeterConversion.h>
#include <visp3/ustk_core/usPostScanToPreScan2DConverter.h>
#include <visp3/ustk_core/usPreScanToPostScan2DConverter.h>

#include <visp3/ustk_grabber/usNetworkGrabberPreScan2D.h>

#include <visp3/ustk_confidence_map/usScanlineConfidence2D.h>

#include <visp3/ustk_template_tracking/usDenseTracker2D.h>

#if defined(VISP_HAVE_V4L2) && defined(VISP_HAVE_PTHREAD) && defined(VISP_HAVE_VIPER850) && \
    defined(VISP_HAVE_MODULE_USTK_GRABBER)

#include <QApplication>

#include <visp3/gui/vpDisplayX.h>
32 // Shared vars
33 typedef enum { capture_waiting, capture_started, capture_stopped } t_CaptureState;
34 t_CaptureState s_capture_state = capture_waiting;
35 vpMutex s_mutex_capture;
36 
37 typedef enum { ROI_waiting, ROI_started } t_stateInitROI;
38 t_stateInitROI s_ROI_state = ROI_waiting;
39 vpMutex s_mutex_image;
41 vpMutex s_mutex_ROI_state;
42 
43 bool s_robotIsRunning;
44 vpMutex s_mutex_control_velocity;
45 vpColVector s_controlVelocity(6, 0.);
46 
48 
50 vpThread::Return displayFunction(vpThread::Args args)
51 {
52  (void)args; // Avoid warning: unused parameter args
53 
56  usImagePostScan2D<unsigned char> confidencePostScan_;
57  usImagePreScan2D<unsigned char> confidencePreScan_;
59  usPostScanToPreScan2DConverter backConverter_;
60  usScanlineConfidence2D confidenceMapProcessor_;
61  usDenseTracker2D tracker;
62  vpRectOriented rectangle;
63  bool rectangleRoiDefined = false;
64  t_stateInitROI stateROI = ROI_waiting;
65 
66  t_CaptureState capture_state_;
67  bool display_initialized_ = false;
68 
69 #if defined(VISP_HAVE_X11)
70  vpDisplayX *dpost_scan_ = NULL; // display post-scan image
71  vpDisplayX *dpost_scan_confidence_ = NULL; // display post-scan image
72 #endif
73 
74  vpPlot plot(2);
75  plot.initGraph(0, 1);
76  plot.setTitle(0, "X target error");
77  plot.setUnitY(0, "error");
78  plot.setLegend(0, 0, "time");
79 
80  plot.initGraph(1, 1);
81  plot.setTitle(1, "confidence barycenter error");
82  plot.setUnitY(1, "error");
83  plot.setLegend(1, 0, "time");
84 
85  // BUG :
86  // plot.initRange(0, 0, 10, -0.01, 0.01);
87  // plot.initRange(1, 0, 10, 5.0, 5.0);
88 
89  double xtarget;
90  double ytarget;
91 
92  vpImagePoint ROI_centralPoint;
93 
94  double startTime = vpTime::measureTimeMs();
95 
96  do {
97  s_mutex_capture.lock();
98  capture_state_ = s_capture_state;
99  s_mutex_capture.unlock();
100 
101  // Check if a frame is available
102  if (capture_state_ == capture_started) {
103  // Create a copy of the captured frame
104  {
105  vpMutex::vpScopedLock lock(s_mutex_image);
106  postScan_ = s_frame;
107  }
108 
109  // target init (wait for ROI definition)
110  if (rectangleRoiDefined && stateROI == ROI_waiting) {
111  tracker.init(postScan_, rectangle);
112  s_mutex_ROI_state.lock();
113  s_ROI_state = ROI_started;
114  s_mutex_ROI_state.unlock();
115  stateROI = ROI_started;
116  }
117 
118  // confidence
119  backConverter_.convert(postScan_, preScan_, 480);
120  // Compute confidence map on pre-scan image
121  confidenceMapProcessor_.run(confidencePreScan_, preScan_);
122  // converting computed confidence map in post-scan
123  converter_.convert(confidencePreScan_, confidencePostScan_);
124 
125  // if ROI defined, we can send commands
126  if (stateROI == ROI_started) {
127  tracker.update(postScan_);
128  rectangle = tracker.getTarget();
129 
130  usPixelMeterConversion::convert(postScan_, rectangle.getCenter().get_j(), rectangle.getCenter().get_i(),
131  xtarget, ytarget);
132 
133  double ttarget = atan2(xtarget, ytarget);
134 
135  double time = (vpTime::measureTimeMs() - startTime) / 1000.0;
136 
137  unsigned int height(confidencePreScan_.getHeight()), width(confidencePreScan_.getWidth());
138 
139  double I_sum = 0.0;
140  for (unsigned int i = 0; i < height; ++i)
141  for (unsigned int j = 0; j < width; ++j)
142  I_sum += static_cast<double>(confidencePreScan_[i][j]);
143 
144  double yc = 0.0;
145  for (unsigned int x = 0; x < height; ++x)
146  for (unsigned int y = 0; y < width; ++y) {
147  yc += y * confidencePreScan_[x][y];
148  }
149  yc /= I_sum;
150 
151  double tc = yc * confidencePreScan_.getScanLinePitch() - confidencePreScan_.getFieldOfView() / 2.0;
152 
153  plot.plot(0, 0, time, xtarget);
154  plot.plot(1, 0, time, vpMath::deg(tc));
155 
156  {
157  vpMutex::vpScopedLock lock(s_mutex_control_velocity);
158 
159  double lambda_t = 0.8;
160  double lambda_c = 0.8;
161 
162  s_controlVelocity = 0.0;
163  s_controlVelocity[1] = -lambda_t * xtarget + lambda_c * (tc - ttarget) * ytarget;
164  s_controlVelocity[3] = lambda_c * (tc - ttarget);
165  }
166  }
167  // Check if we need to initialize the display with the first frame
168  if (!display_initialized_) {
169 // Initialize the display
170 #if defined(VISP_HAVE_X11)
171  unsigned int xpos = 10;
172  dpost_scan_ = new vpDisplayX(postScan_, xpos, 10, "post-scan");
173  xpos += 300;
174  dpost_scan_confidence_ = new vpDisplayX(confidencePostScan_, xpos, 10, "post-scan confidence");
175  display_initialized_ = true;
176 #endif
177  }
178 
179  // Display the image
180  vpDisplay::display(postScan_);
181 
182  if (rectangleRoiDefined) {
183  vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getTopLeft().get_i()),
184  static_cast<int>(rectangle.getTopLeft().get_j()),
185  static_cast<int>(rectangle.getBottomLeft().get_i()),
186  static_cast<int>(rectangle.getBottomLeft().get_j()), vpColor::red);
187  vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getBottomLeft().get_i()),
188  static_cast<int>(rectangle.getBottomLeft().get_j()),
189  static_cast<int>(rectangle.getBottomRight().get_i()),
190  static_cast<int>(rectangle.getBottomRight().get_j()), vpColor::red);
191  vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getBottomRight().get_i()),
192  static_cast<int>(rectangle.getBottomRight().get_j()),
193  static_cast<int>(rectangle.getTopRight().get_i()),
194  static_cast<int>(rectangle.getTopRight().get_j()), vpColor::red);
195  vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getTopRight().get_i()),
196  static_cast<int>(rectangle.getTopRight().get_j()),
197  static_cast<int>(rectangle.getTopLeft().get_i()),
198  static_cast<int>(rectangle.getTopLeft().get_j()), vpColor::red);
199  }
200 
201  // Display the confidence map
202  vpDisplay::display(confidencePostScan_);
203 
204  // ROI definition with a mouse click
205  vpDisplay::displayText(postScan_, 10, 10, "Click to define ROI...", vpColor::red);
206  if (vpDisplay::getClick(postScan_, ROI_centralPoint, false)) {
207  // update rectangle only on the first click
208  if (stateROI == ROI_waiting) {
209  rectangle.setCenter(vpImagePoint(ROI_centralPoint.get_v(), ROI_centralPoint.get_u()));
210  rectangle.setSize(80, 50);
211  rectangle.setOrientation(0);
212  rectangleRoiDefined = true;
213  }
214  }
215 
216  // Trigger end of acquisition with a mouse click on confidence
217  vpDisplay::displayText(confidencePostScan_, 10, 10, "Click to exit...", vpColor::red);
218  if (vpDisplay::getClick(confidencePostScan_, false)) {
219  vpMutex::vpScopedLock lock(s_mutex_capture);
220  s_capture_state = capture_stopped;
221  }
222 
223  // Update the display
224  vpDisplay::flush(postScan_);
225  vpDisplay::flush(confidencePostScan_);
226  } else {
227  vpTime::wait(2); // Sleep 2ms
228  }
229  } while (capture_state_ != capture_stopped);
230 
231 #if defined(VISP_HAVE_X11)
232  delete dpost_scan_;
233  delete dpost_scan_confidence_;
234 #endif
235 
236  std::cout << "End of display thread" << std::endl;
237  return 0;
238 }
240 
241 vpThread::Return controlFunction(vpThread::Args args)
242 {
243  (void)args;
244  vpRobotViper850 robot;
245 
246  vpMatrix eJe; // robot jacobian
247 
248  // Transformation from end-effector frame to the force/torque sensor
249  // Note that the end-effector frame is located on the lower part of
250  // male component of the tool changer.
251  vpHomogeneousMatrix eMs;
252  eMs[2][3] = -0.062; // tz = -6.2cm
253 
254  // Transformation from force/torque sensor to the probe frame from where
255  // we want to control the robot
256  vpHomogeneousMatrix sMp;
257 
258  // Transformation from force/torque sensor to the end-effector frame
259  vpHomogeneousMatrix sMe;
260  eMs.inverse(sMe);
261 
262  // Build the transformation that allows to convert a velocity in the
263  // end-effector frame to the FT sensor frame
264  vpVelocityTwistMatrix sVe;
265  sVe.buildFrom(sMe);
266 
267  vpColVector sHs(6); // force/torque sensor measures
268  vpColVector sHs_star(6); // force/torque sensor desired values in sensor frame
269  vpColVector pHp_star(6); // force/torque sensor desired values in probe frame
270  vpColVector gHg(6); // force/torque due to the gravity
271  vpMatrix lambda(6, 6);
272  // Position of the cog of the object attached after the sensor in the sensor frame
273  vpTranslationVector stg;
274  vpColVector sHs_bias(6); // force/torque sensor measures for bias
275 
276  // Cartesian velocities corresponding to the force/torque control in the
277  // sensor frame
278  vpColVector v_s(6);
279  // Joint velocities corresponding to the force/torque control
280  vpColVector q_dot(6);
281 
282  // Initialized the force gain
283  lambda = 0;
284  for (int i = 0; i < 3; i++)
285  lambda[i][i] = 0.001;
286  // Initialized the torque gain
287  for (int i = 3; i < 6; i++)
288  lambda[i][i] = 0.005;
289 
290  // Initialize the desired force/torque values
291  pHp_star = 0;
292  pHp_star[2] = 3; // Fz = 3N
293  //
294  // Case of the C65 US probe
295  //
296  // Set the probe frame control
297  sMp[2][3] = 0.262; // tz = 26.2cm
298 
299  // Init the force/torque due to the gravity
300  gHg[2] = -(0.696 + 0.476) * 9.81; // m*g
301 
302  // Position of the cog of the object attached after the sensor in the sensor frame
303  stg[0] = 0;
304  stg[1] = 0;
305  stg[2] = 0.088; // tz = 88.4mm
306 
307  vpRotationMatrix sRp;
308  sMp.extract(sRp);
309  vpTranslationVector stp;
310  sMp.extract(stp);
311 
312  vpHomogeneousMatrix eMp = eMs * sMp;
313  vpVelocityTwistMatrix eVp(eMp);
314 
315  // Get the position of the end-effector in the reference frame
316  vpColVector q;
317  vpHomogeneousMatrix fMe;
318  vpHomogeneousMatrix fMs;
319  vpRotationMatrix sRf;
320  robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
321  robot.get_fMe(q, fMe);
322  // Compute the position of the sensor frame in the reference frame
323  fMs = fMe * eMs;
324  vpHomogeneousMatrix sMf;
325  fMs.inverse(sMf);
326  sMf.extract(sRf);
327 
328  // Build the transformation that allows to convert the forces due to the
329  // gravity in the sensor frame
330  vpForceTwistMatrix sFg(sMf); // Only the rotation part is to consider
331  // Modify the translational part
332  for (int i = 0; i < 3; i++)
333  for (int j = 0; j < 3; j++)
334  sFg[i + 3][j] = (stg.skew() * sRf)[i][j];
335 
336  // Build the transformation that allows to convert a FT expressed in the
337  // FT probe frame into the sensor frame
338  vpForceTwistMatrix sFp(sMp);
339 
340  // Bias the force/torque sensor
341  std::cout << "\nBias the force/torque sensor...\n " << std::endl;
342  robot.biasForceTorqueSensor();
343 
344  // Set the robot to velocity control
345  robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
346 
347  int iter = 0;
348  t_CaptureState capture_state_;
349  t_stateInitROI ROI_state = ROI_waiting;
350 
351  std::cout << "Waiting ROI definition to start control loop..." << std::endl;
352  while (ROI_state == ROI_waiting) {
353  vpMutex::vpScopedLock lock(s_mutex_ROI_state);
354  ROI_state = s_ROI_state;
355 
356  // Case of end of capture before ROI definition
357  s_mutex_capture.lock();
358  capture_state_ = s_capture_state;
359  s_mutex_capture.unlock();
360  if (capture_state_ == capture_stopped) {
361  std::cout << "end of control thread" << std::endl;
362  return 0;
363  }
364  }
365 
366  std::cout << "Starting control loop..." << std::endl;
367  do {
368  s_mutex_capture.lock();
369  capture_state_ = s_capture_state;
370  s_mutex_capture.unlock();
371 
372  // Check if a frame is available
373  if (capture_state_ == capture_started) {
374 
375  // Get the force/torque measures from the sensor
376  sHs = robot.getForceTorque();
377 
378  // Multiply the measures by -1 to get the force/torque exerced by the
379  // robot to the environment.
380  sHs *= -1;
381 
382  // Update the gravity transformation matrix
383  robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
384  robot.get_fMe(q, fMe);
385  // Compute the position of the sensor frame in the reference frame
386  fMs = fMe * eMs;
387  // Compute the inverse transformation
388  fMs.inverse(sMf);
389  sMf.extract(sRf);
390  // Update the transformation that allows to convert the forces due to the
391  // gravity in the sensor frame
392  sFg.buildFrom(sMf); // Only the rotation part is to consider
393  // Modify the translational part
394  for (int i = 0; i < 3; i++)
395  for (int j = 0; j < 3; j++)
396  sFg[i + 3][j] = (stg.skew() * sRf)[i][j];
397 
398  if (iter == 0) {
399  sHs_bias = sHs - sFg * gHg;
400  }
401 
402  // Compute rotation in probe frame from control velocity deduced of the confidence map barycenter
403  vpColVector v_p;
404  {
405  vpMutex::vpScopedLock lock(s_mutex_control_velocity);
406  v_p = s_controlVelocity;
407  }
408 
409  v_p[0] = 0;
410  v_p[2] = 0;
411  v_p[4] = 0;
412  v_p[5] = 0;
413 
414  // Compute the force/torque control law in the sensor frame
415  v_s = lambda * (sFp * pHp_star - (sHs - sFg * gHg - sHs_bias));
416 
417  v_s[0] = 0.0;
418  v_s[1] = 0.0;
419  v_s[3] = 0.0;
420  v_s[4] = 0.0;
421  v_s[5] = 0.0;
422 
423  vpVelocityTwistMatrix eVs;
424  sVe.inverse(eVs);
425 
426  vpColVector v_e = eVs * v_s + eVp * v_p;
427 
428  // std::cout << v_e.t() << std::endl;
429 
430  // Get the robot jacobian eJe
431  robot.get_eJe(eJe);
432 
433  // Compute the joint velocities to achieve the force/torque control
434  q_dot = eJe.pseudoInverse() * v_e;
435 
436  // Send the joint velocities to the robot
437  robot.setVelocity(vpRobot::ARTICULAR_FRAME, q_dot);
438 
439  iter++;
440  }
441  vpTime::wait(1); // 5
442  } while (capture_state_ != capture_stopped);
443 
444  std::cout << "End of control thread" << std::endl;
445  return 0;
446 }
447 
449 int main(int argc, char **argv)
450 {
451  QApplication app(argc, argv);
452 
453  // init
454  s_robotIsRunning = false;
455  s_controlVelocity = vpColVector(6, 0);
456 
457  // Start the threads
458  vpThread thread_display((vpThread::Fn)displayFunction);
459  vpThread thread_control((vpThread::Fn)controlFunction);
460 
462  usImagePostScan2D<unsigned char> postScan_local;
463 
464  // starts the grabber
466  qtGrabber->connectToServer();
467 
468  // setting acquisition parameters
470  header.probeId = 15; // 4DC7 id = 15
471  header.slotId = 0; // top slot id = 0
472  header.imagingMode = 0; // B-mode = 0
473 
474  // sending acquisition parameters
475  qtGrabber->initAcquisition(header);
476 
477  // set the 4DC7 motor on the middle frame
478  qtGrabber->setMotorPosition(37);
479  qtGrabber->sendAcquisitionParameters();
480  qtGrabber->runAcquisition();
481 
482  bool stop_capture_ = false;
483 
484  t_CaptureState capture_state_local = capture_waiting;
485 
486  usImagePreScan2D<unsigned char> *grabbedImage;
487 
488  while (!stop_capture_) {
489  s_mutex_capture.lock();
490  capture_state_local = s_capture_state;
491  s_mutex_capture.unlock();
492  if (capture_state_local == capture_stopped)
493  stop_capture_ = true;
494  else if (qtGrabber->isFirstFrameAvailable()) {
495  grabbedImage = qtGrabber->acquire();
496 
497  converter.convert(*(grabbedImage), postScan_local);
498 
499  s_mutex_image.lock();
500  s_frame = postScan_local;
501  s_mutex_image.unlock();
502 
503  s_mutex_capture.lock();
504  s_capture_state = capture_started;
505  s_mutex_capture.unlock();
506  }
507  }
508 
509  // Wait until thread ends up
510  thread_display.join();
511  thread_control.join();
512 
513  app.quit();
514  return 0;
515 }
517 
518 #else
519 int main()
520 {
521 #ifndef VISP_HAVE_V4L2
522  std::cout << "You should enable V4L2 to make this example working..." << std::endl;
523 #elif !defined(_WIN32) && (defined(__unix__) || defined(__unix) || (defined(__APPLE__) && defined(__MACH__))) // UNIX
524  std::cout << "You should enable pthread usage and rebuild ViSP..." << std::endl;
525 #else
526  std::cout << "Multi-threading seems not supported on this platform" << std::endl;
527 #endif
528 #ifndef VISP_HAVE_VIPER850
529  std::cout << "You need viper 850 robot to run this example" << std::endl;
530 #endif
531 }
532 
533 #endif
/*
 * NOTE(review): the lines below are extraction residue from the rendered
 * documentation page (tooltip summaries of the UsTK API used above). They are
 * kept, commented out, for reference:
 *
 * usDenseTracker2D — 2D region tracker
 *   void update(const vpImage< unsigned char > &I)
 *     Tracking method, to call at every new frame to track.
 *   void init(const vpImage< unsigned char > &I, const vpRectOriented &R)
 *     Initialisation of the tracker: sets the region to track (R) in the image (I) before starting.
 *   vpRectOriented getTarget() const
 *     To call after update() at each new frame, to get the position of the ROI in the last acquired frame.
 *
 * usNetworkGrabberPreScan2D — specific class to grab pre-scan frames from the ultrasound station on the network.
 *   usFrameGrabbedInfo< usImagePreScan2D< unsigned char > > * acquire()
 *   bool initAcquisition(const usNetworkGrabber::usInitHeaderSent &header)
 *   void setMotorPosition(int motorPosition)
 *
 * usPixelMeterConversion — conversion for 2D post-scan images.
 *   static void convert(const usImagePostScan2D< unsigned char > &image, const double &u, const double &v, double &x, double &y)
 *
 * usPostScanToPreScan2DConverter
 *   void convert(const usImagePostScan2D< unsigned char > &imageToConvert, usImagePreScan2D< unsigned char > &imageConverted, int preScanSamples)
 *
 * usPreScanToPostScan2DConverter
 *   void convert(const usImagePreScan2D< unsigned char > &preScanImage, usImagePostScan2D< unsigned char > &postScanImage, double xResolution=0., double yResolution=0.)
 *
 * usScanlineConfidence2D — process a pre-scan image to determine the confidence map.
 *   void run(usImagePreScan2D< unsigned char > &preScanConfidence, const usImagePreScan2D< unsigned char > &preScanImage)
 */