// UsTK : Ultrasound ToolKit - version 2.0.1 under development (2025-02-01)
// tutorial-ultrasonix-qt-grabbing-pre-scan-confidence-control.cpp
3 #include <iostream>
4 #include <visp3/ustk_core/usConfig.h>
5 
6 #if (defined(USTK_HAVE_QT5) || defined(USTK_HAVE_VTK_QT)) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && \
7  defined(VISP_HAVE_VIPER850)
8 
9 #include <QApplication>
10 #include <QStringList>
11 #include <QtCore/QThread>
12 
13 #include <visp3/ustk_grabber/usNetworkGrabberPreScan2D.h>
14 
15 #include <visp3/ustk_confidence_map/usScanlineConfidence2D.h>
16 
17 #include <visp3/gui/vpDisplayX.h>
18 #include <visp3/robot/vpRobotViper850.h>
19 
// Shared vars

// Protects s_controlVelocity, which is written by the main (grabbing) thread
// and read by the robot control thread.
vpMutex s_mutex_control_velocity;
// 6-dof velocity twist computed from the confidence-map barycenter; only
// component [5] (rotation about the probe Z axis) is actually used by the
// control thread.
vpColVector s_controlVelocity(6, 0.);

// Protects s_capture_state.
vpMutex s_mutex_capture;
// Capture life-cycle: waiting (before the first frame), started (frames are
// flowing, control loop active), stopped (tells the control thread to exit).
typedef enum { capture_waiting, capture_started, capture_stopped } t_CaptureState;
t_CaptureState s_capture_state = capture_waiting;
28 
30 vpThread::Return controlFunction(vpThread::Args args)
31 {
32  (void)args;
33  vpRobotViper850 robot;
34 
35  vpMatrix eJe; // robot jacobian
36 
37  // Transformation from end-effector frame to the force/torque sensor
38  // Note that the end-effector frame is located on the lower part of
39  // male component of the tool changer.
40  vpHomogeneousMatrix eMs;
41  eMs[2][3] = -0.062; // tz = -6.2cm
42 
43  // Transformation from force/torque sensor to the probe frame from where
44  // we want to control the robot
45  vpHomogeneousMatrix sMp;
46 
47  // Transformation from force/torque sensor to the end-effector frame
48  vpHomogeneousMatrix sMe;
49  eMs.inverse(sMe);
50 
51  // Build the transformation that allows to convert a velocity in the
52  // end-effector frame to the FT sensor frame
53  vpVelocityTwistMatrix sVe;
54  sVe.buildFrom(sMe);
55 
56  vpColVector sHs(6); // force/torque sensor measures
57  vpColVector sHs_star(6); // force/torque sensor desired values in sensor frame
58  vpColVector pHp_star(6); // force/torque sensor desired values in probe frame
59  vpColVector gHg(6); // force/torque due to the gravity
60  vpMatrix lambdaProportionnal(6, 6); // gain of proportionnal part of the force controller
61  vpMatrix lambdaDerivate(6, 6); // gain of derivate part of the force controller
62  vpMatrix lambdaIntegral(6, 6); // gain of derivate part of the force controller
63  vpColVector sEs(6, 0); // force/torque error
64  vpColVector sEs_last(6, 0); // force/torque error
65  vpColVector sEs_sum(6, 0); // force/torque sum error
66  // Position of the cog of the object attached after the sensor in the sensor frame
67  vpTranslationVector stg;
68  vpColVector sHs_bias(6); // force/torque sensor measures for bias
69 
70  // Cartesian velocities corresponding to the force/torque control in the
71  // sensor frame
72  vpColVector v_s(6);
73  // Joint velocities corresponding to the force/torque control
74  vpColVector q_dot(6);
75 
77  // Initialized the force gains, for proportionnal component
78  lambdaProportionnal = 0;
79  for (int i = 0; i < 3; i++)
80  lambdaProportionnal[i][i] = 0.015;
81  // Initialized the torque gains, for proportionnal component
82  for (int i = 3; i < 6; i++)
83  lambdaProportionnal[i][i] = 0;
84  // Initialized the force gains, for derivate component
85  lambdaDerivate = 0;
86  for (int i = 0; i < 3; i++)
87  lambdaDerivate[i][i] = 0.09;
88  // Initialized the torque gains, for derivate component
89  for (int i = 3; i < 6; i++)
90  lambdaDerivate[i][i] = 0;
91  // Initialized the force gains, for integral component
92  lambdaIntegral = 0;
93  for (int i = 0; i < 3; i++)
94  lambdaIntegral[i][i] = 0;
95  // Initialized the torque gains, for integral component
96  for (int i = 3; i < 6; i++)
97  lambdaIntegral[i][i] = 0;
98 
99  // Initialize the desired force/torque values
100  pHp_star = 0;
101  pHp_star[1] = 4; // Fy = 4N
102  //
103  // Case of the C65 US probe
104  //
105  // Set the probe frame control
106  sMp[0][0] = 0;
107  sMp[1][1] = 0;
108  sMp[2][2] = 0;
109  sMp[0][2] = 1; // Z in force sensor becomes X in the probe control frame
110  sMp[1][0] = 1; // X in force sensor becomes Y in the probe control frame
111  sMp[2][1] = 1; // Y in force sensor becomes Z in the probe control frame
112  sMp[1][3] = 0.262; // ty = 26.2cm
113 
114  // Init the force/torque due to the gravity
115  gHg[2] = -(0.696 + 0.476) * 9.81; // m*g
116 
117  // Position of the cog of the object attached after the sensor in the sensor frame
118  stg[0] = 0;
119  stg[1] = 0;
120  stg[2] = 0.088; // tz = 88.4mm
121 
122  vpHomogeneousMatrix eMp = eMs * sMp;
123 
124  vpVelocityTwistMatrix eVp(eMp);
125 
126  // Get the position of the end-effector in the reference frame
127  vpColVector q;
128  vpHomogeneousMatrix fMe;
129  vpHomogeneousMatrix fMs;
130  vpRotationMatrix sRf;
131  robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
132  robot.get_fMe(q, fMe);
133  // Compute the position of the sensor frame in the reference frame
134  fMs = fMe * eMs;
135  vpHomogeneousMatrix sMf;
136  fMs.inverse(sMf);
137  sMf.extract(sRf);
138 
139  // Build the transformation that allows to convert the forces due to the
140  // gravity in the sensor frame
141  vpForceTwistMatrix sFg(sMf); // Only the rotation part is to consider
142  // Modify the translational part
143  for (int i = 0; i < 3; i++)
144  for (int j = 0; j < 3; j++)
145  sFg[i + 3][j] = (stg.skew() * sRf)[i][j];
146 
147  // Build the transformation that allows to convert a FT expressed in the
148  // FT probe frame into the sensor frame
149  vpForceTwistMatrix sFp(sMp);
150 
151  // Bias the force/torque sensor
152  std::cout << "\nBias the force/torque sensor...\n " << std::endl;
153  robot.biasForceTorqueSensor();
154 
155  // Set the robot to velocity control
156  robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
157 
158  unsigned int iter = 0;
159  t_CaptureState capture_state_;
160 
161  // signal filtering
162  unsigned int bufferSize = 80;
163  double signalBuffer[bufferSize];
164  for (unsigned int i = 0; i < bufferSize; i++)
165  signalBuffer[i] = 0;
166 
167  std::cout << "Starting control loop..." << std::endl;
168  double t0 = vpTime::measureTimeMs();
169  double deltaTmilliseconds = 1; // 1ms for each loop cycle
170  do {
171  t0 = vpTime::measureTimeMs();
172  s_mutex_capture.lock();
173  capture_state_ = s_capture_state;
174  s_mutex_capture.unlock();
175 
176  // Check if a frame is available
177  if (capture_state_ == capture_started) {
178 
179  // Get the force/torque measures from the sensor
180  sHs = robot.getForceTorque();
181 
182  // Multiply the measures by -1 to get the force/torque exerced by the
183  // robot to the environment.
184  sHs *= -1;
185 
186  // Update the gravity transformation matrix
187  robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
188  robot.get_fMe(q, fMe);
189  // Compute the position of the sensor frame in the reference frame
190  fMs = fMe * eMs;
191  // Compute the inverse transformation
192  fMs.inverse(sMf);
193  sMf.extract(sRf);
194  // Update the transformation that allows to convert the forces due to the
195  // gravity in the sensor frame
196  sFg.buildFrom(sMf); // Only the rotation part is to consider
197  // Modify the translational part
198  for (int i = 0; i < 3; i++)
199  for (int j = 0; j < 3; j++)
200  sFg[i + 3][j] = (stg.skew() * sRf)[i][j];
201 
202  if (iter == 0) {
203  sHs_bias = sHs - sFg * gHg;
204  }
205 
206  // Compute rotation in probe frame from control velocity deduced of the confidence map barycenter
207  vpColVector v_p;
208  {
209  vpMutex::vpScopedLock lock(s_mutex_control_velocity);
210  v_p = s_controlVelocity;
211  }
212 
213  v_p[0] = 0;
214  v_p[1] = 0;
215  v_p[2] = 0;
216  v_p[3] = 0;
217  v_p[4] = 0;
218 
219  // save last error for derivate part of the controller
220  sEs_last = sEs;
221 
222  // Compute the force/torque error in the sensor frame
223  sEs = sFp * pHp_star - (sHs - sFg * gHg - sHs_bias);
224 
225  // fill filtering buffer
226  signalBuffer[iter % bufferSize] = sEs[2];
227 
228  // filter
229  double sum = 0;
230  unsigned int realBufferSize = iter + 1 < bufferSize ? iter + 1 : bufferSize;
231  for (unsigned int i = 0; i < realBufferSize; i++) {
232  sum += signalBuffer[i];
233  }
234  sEs[2] = sum / realBufferSize;
235 
236  sEs_sum += sEs;
237 
238  // to avoid hudge derivate error at first iteration, we set the derivate error to 0
239  if (iter == 0) {
240  sEs_last = sEs;
241  }
242 
243  // Compute the force/torque control law in the sensor frame (propotionnal + derivate controller)
244  v_s =
245  lambdaProportionnal * sEs + lambdaDerivate * (sEs - sEs_last) / deltaTmilliseconds + lambdaIntegral * sEs_sum;
246 
247  v_s[0] = 0.0;
248  v_s[1] = 0.0;
249  v_s[3] = 0.0;
250  v_s[4] = 0.0;
251  v_s[5] = 0.0;
252 
253  vpVelocityTwistMatrix eVs;
254  sVe.inverse(eVs);
255 
256  vpColVector v_e = eVs * v_s + eVp * v_p;
257 
258  // Get the robot jacobian eJe
259  robot.get_eJe(eJe);
260 
261  // Compute the joint velocities to achieve the force/torque control
262  q_dot = eJe.pseudoInverse() * v_e;
263 
264  // Send the joint velocities to the robot
265  robot.setVelocity(vpRobot::ARTICULAR_FRAME, q_dot);
266 
267  iter++;
268  }
269  vpTime::wait(t0, deltaTmilliseconds);
270  } while (capture_state_ != capture_stopped);
271 
272  std::cout << "End of control thread" << std::endl;
273  return 0;
274 }
275 
276 int main(int argc, char **argv)
277 {
278  // QT application
279  QApplication app(argc, argv);
280 
282  qtGrabber->connectToServer();
283 
284  // setting acquisition parameters
286  if (qApp->arguments().contains(QString("--probeID"))) {
287  header.probeId = qApp->arguments().at(qApp->arguments().indexOf(QString("--probeID")) + 1).toInt();
288  } else
289  header.probeId = 15; // 4DC7 id = 15 by default
290 
291  if (qApp->arguments().contains(QString("--slotID"))) {
292  header.slotId = qApp->arguments().at(qApp->arguments().indexOf(QString("--slotID")) + 1).toInt();
293  } else
294  header.slotId = 0; // top slot id = 0 by default
295 
296  if (qApp->arguments().contains(QString("--imagingMode"))) {
297  header.imagingMode = qApp->arguments().at(qApp->arguments().indexOf(QString("--imagingMode")) + 1).toInt();
298  } else
299  header.imagingMode = 0; // B-mode = 0 by default
300 
301  // prepare image;
305 
306  // gain
307  double lambdaVisualError = 5;
308 
309  // Prepare display
310  vpDisplay *display = NULL;
311  vpDisplay *displayConf = NULL;
312  bool displayInit = false;
313 
314  // prepare confidence
315  usScanlineConfidence2D confidenceProcessor;
316 
317  bool captureRunning = true;
318 
319  // sending acquisition parameters
320  qtGrabber->initAcquisition(header);
321 
322  qtGrabber->setMotorPosition(32); // middle
323  qtGrabber->sendAcquisitionParameters();
324 
325  qtGrabber->runAcquisition();
326 
327  // start robot control thread
328  vpThread thread_control((vpThread::Fn)controlFunction);
329 
330  std::cout << "waiting ultrasound initialisation..." << std::endl;
331 
332  // our local grabbing loop
333  do {
334  grabbedFrame = qtGrabber->acquire();
335  confidenceProcessor.run(confidence, *grabbedFrame);
336 
337  // Confidence barycenter computation
338 
339  // sum first
340  double I_sum = 0.0;
341  for (unsigned int i = 0; i < confidence.getHeight(); ++i)
342  for (unsigned int j = 0; j < confidence.getWidth(); ++j)
343  I_sum += static_cast<double>(confidence[i][j]);
344 
345  double yc = 0.0;
346  for (unsigned int x = 0; x < confidence.getHeight(); ++x)
347  for (unsigned int y = 0; y < confidence.getWidth(); ++y) {
348  yc += y * confidence[x][y];
349  }
350  yc /= I_sum;
351 
352  double tc = yc * confidence.getScanLinePitch() - confidence.getFieldOfView() / 2.0;
353 
354  {
355  vpMutex::vpScopedLock lock(s_mutex_control_velocity);
356  s_controlVelocity = 0.0;
357  s_controlVelocity[5] = lambdaVisualError * tc;
358  }
359 
360  s_mutex_capture.lock();
361  s_capture_state = capture_started;
362  s_mutex_capture.unlock();
363 
364  // std::cout << "MAIN THREAD received frame No : " << grabbedFrame->getFrameCount() << std::endl;
365 
366  // init display
367  if (!displayInit && grabbedFrame->getHeight() != 0 && grabbedFrame->getWidth() != 0) {
368 #ifdef VISP_HAVE_X11
369  display = new vpDisplayX(*grabbedFrame);
370  displayConf = new vpDisplayX(confidence, (*grabbedFrame).getWidth() + 60, 10);
371 #elif defined(VISP_HAVE_GDI)
372  display = new vpDisplayGDI(*grabbedFrame);
373  displayConf = new vpDisplayGDI(confidence, (*grabbedFrame).getWidth() + 60, 10);
374 #endif
375  qtGrabber->useVpDisplay(display);
376  displayInit = true;
377  }
378 
379  // processing display
380  if (displayInit) {
381  vpDisplay::display(*grabbedFrame);
382  vpDisplay::display(confidence);
383 
384  // display target barycenter (image center)
385  vpDisplay::displayText(confidence, 10, 10, "target", vpColor::green);
386  vpDisplay::displayLine(confidence, 0, confidence.getWidth() / 2 - 1, confidence.getHeight() - 1,
387  confidence.getWidth() / 2 - 1, vpColor::green);
388 
389  // dispay current confidence barycenter
390  vpDisplay::displayText(confidence, 25, 10, "current", vpColor::red);
391  vpDisplay::displayLine(confidence, 0, static_cast<int>(yc), confidence.getHeight() - 1, static_cast<int>(yc),
392  vpColor::red);
393  if (vpDisplay::getClick(confidence, false))
394  captureRunning = false;
395  if (vpDisplay::getClick(*grabbedFrame, false))
396  captureRunning = false;
397 
398  vpDisplay::flush(*grabbedFrame);
399  vpDisplay::flush(confidence);
400  vpTime::wait(10); // wait to simulate a local process running on last frame frabbed
401  }
402  } while (captureRunning);
403 
404  qtGrabber->stopAcquisition();
405 
406  std::cout << "stop capture" << std::endl;
407  if (displayInit) {
408  if (display)
409  delete display;
410 
411  if (displayConf)
412  delete displayConf;
413  }
414 
415  // move up the robot arm
416  {
417  vpMutex::vpScopedLock lock(s_mutex_control_velocity);
418  s_controlVelocity = 0.0;
419  s_controlVelocity[5] = -0.05; // move up in probe frame
420  }
421  vpTime::wait(500);
422  {
423  vpMutex::vpScopedLock lock(s_mutex_control_velocity);
424  s_controlVelocity = 0.0;
425  }
426 
427  s_mutex_capture.lock();
428  s_capture_state = capture_stopped;
429  s_mutex_capture.unlock();
430  thread_control.join();
431 
432  // end grabber
433  qtGrabber->disconnect();
434  app.quit();
435 
436  std::cout << "end main thread" << std::endl;
437 
438  return 0;
439 }
440 
441 #else
//! Fallback when the required dependencies (Qt5/VTK-Qt, X11/GDI, Viper850)
//! are not available at configure time: print the requirements and exit.
int main()
{
  // Fixed typos in the user-facing message: "intall" -> "install", "wigdets" -> "widgets".
  std::cout << "You should install Qt5 (with widgets and network modules), and display X to run this tutorial"
            << std::endl;
  return 0;
}
448 
449 #endif
// --- Doxygen cross-reference excerpt (API tooltips captured with this page, kept as comments) ---
// usFrameGrabbedInfo: class to store additional information arriving on the network with grabbed ultrasound images.
// usNetworkGrabberPreScan2D: specific class to grab pre-scan frames from the ultrasound station on the network.
//   usFrameGrabbedInfo< usImagePreScan2D< unsigned char > > * acquire()
//   void useVpDisplay(vpDisplay *display)
//   bool initAcquisition(const usNetworkGrabber::usInitHeaderSent &header)
//   void setMotorPosition(int motorPosition)
// usScanlineConfidence2D: processes a pre-scan image to determine the confidence map.
//   void run(usImagePreScan2D< unsigned char > &preScanConfidence, const usImagePreScan2D< unsigned char > &preScanImage)
// VISP_EXPORT void display(const usOrientedPlane3D &plane, const vpImage< ImageDataType > &I,
//   const vpHomogeneousMatrix &imageMworld, double Xscale=3000, double Yscale=3000,
//   const vpColor &color=vpColor::green) — display usOrientedPlane3D.