#include <iostream>
#include <visp3/ustk_core/usConfig.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpMutex.h>
#include <visp3/core/vpThread.h>
#include <visp3/core/vpTime.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/robot/vpRobotViper850.h>
#include <visp3/ustk_core/usImagePostScan2D.h>
#include <visp3/ustk_core/usImagePreScan2D.h>
#include <visp3/ustk_core/usPixelMeterConversion.h>
#include <visp3/ustk_core/usPostScanToPreScan2DConverter.h>
#include <visp3/ustk_core/usPreScanToPostScan2DConverter.h>
#include <visp3/ustk_grabber/usNetworkGrabberPreScan2D.h>
#include <visp3/ustk_confidence_map/usScanlineConfidence2D.h>
#include <visp3/ustk_template_tracking/usDenseTracker2D.h>
#if defined(VISP_HAVE_V4L2) && defined(VISP_HAVE_PTHREAD) && defined(VISP_HAVE_VIPER850) && \
defined(VISP_HAVE_MODULE_USTK_GRABBER)
#include <QApplication>
#include <visp3/gui/vpDisplayX.h>
typedef enum { capture_waiting, capture_started, capture_stopped } t_CaptureState;
t_CaptureState s_capture_state = capture_waiting;
vpMutex s_mutex_capture;
typedef enum { ROI_waiting, ROI_started } t_stateInitROI;
t_stateInitROI s_ROI_state = ROI_waiting;
vpMutex s_mutex_image;
vpMutex s_mutex_ROI_state;
bool s_robotIsRunning;
vpMutex s_mutex_control_velocity;
vpColVector s_controlVelocity(6, 0.);
// Display thread: copies the shared post-scan frame, runs the dense template
// tracker and the scanline confidence map, displays both images, and writes
// the resulting visual-servoing velocity into s_controlVelocity for the
// control thread. Exits when the user clicks in the confidence display.
vpThread::Return displayFunction(vpThread::Args args)
{
  (void)args;

  // NOTE(review): the local image/processing objects below were missing from
  // the extracted snippet and have been restored from their usage below.
  usImagePostScan2D<unsigned char> postScan_;           // current post-scan frame
  usImagePreScan2D<unsigned char> preScan_;             // frame converted back to pre-scan
  usImagePreScan2D<unsigned char> confidencePreScan_;   // confidence map (pre-scan geometry)
  usImagePostScan2D<unsigned char> confidencePostScan_; // confidence map (post-scan, for display)
  usDenseTracker2D tracker;                             // dense template tracker
  usPostScanToPreScan2DConverter backConverter_;
  usPreScanToPostScan2DConverter converter_;
  usScanlineConfidence2D confidenceMapProcessor_;

  vpRectOriented rectangle;         // tracked region of interest
  bool rectangleRoiDefined = false; // true once the user has clicked a ROI center
  t_stateInitROI stateROI = ROI_waiting;
  t_CaptureState capture_state_;
  bool display_initialized_ = false;
#if defined(VISP_HAVE_X11)
  vpDisplayX *dpost_scan_ = NULL;
  vpDisplayX *dpost_scan_confidence_ = NULL;
#endif

  // Curves: lateral target error and confidence-barycenter angular error.
  vpPlot plot(2);
  plot.initGraph(0, 1);
  plot.setTitle(0, "X target error");
  plot.setUnitY(0, "error");
  plot.setLegend(0, 0, "time");

  plot.initGraph(1, 1);
  plot.setTitle(1, "confidence barycenter error");
  plot.setUnitY(1, "error");
  plot.setLegend(1, 0, "time");

  double xtarget;
  double ytarget;
  vpImagePoint ROI_centralPoint;

  double startTime = vpTime::measureTimeMs();

  do {
    s_mutex_capture.lock();
    capture_state_ = s_capture_state;
    s_mutex_capture.unlock();

    if (capture_state_ == capture_started) {
      // Copy the latest frame shared with the grabbing loop in main().
      {
        vpMutex::vpScopedLock lock(s_mutex_image);
        postScan_ = s_frame;
      }

      // Initialize the tracker once, as soon as the user has defined a ROI.
      if (rectangleRoiDefined && stateROI == ROI_waiting) {
        tracker.init(postScan_, rectangle);
        s_mutex_ROI_state.lock();
        s_ROI_state = ROI_started;
        s_mutex_ROI_state.unlock();
        stateROI = ROI_started;
      }

      // Confidence pipeline: post-scan -> pre-scan (480 samples/scanline)
      // -> confidence map -> post-scan for display.
      backConverter_.convert(postScan_, preScan_, 480);
      confidenceMapProcessor_.run(confidencePreScan_, preScan_);
      converter_.convert(confidencePreScan_, confidencePostScan_);

      if (stateROI == ROI_started) {
        // Track the target in the new frame and convert its image position
        // to meters in the probe frame.
        // NOTE(review): these calls were truncated in the extracted snippet;
        // restored from the usDenseTracker2D / usPixelMeterConversion APIs.
        tracker.update(postScan_);
        rectangle = tracker.getTarget();
        usPixelMeterConversion::convert(postScan_, rectangle.getCenter().get_j(),
                                        rectangle.getCenter().get_i(), xtarget, ytarget);
        double ttarget = atan2(xtarget, ytarget); // angular position of the target

        double time = (vpTime::measureTimeMs() - startTime) / 1000.0;

        // Barycenter of the pre-scan confidence map along the scanline axis.
        unsigned int height(confidencePreScan_.getHeight()), width(confidencePreScan_.getWidth());
        double I_sum = 0.0;
        for (unsigned int i = 0; i < height; ++i)
          for (unsigned int j = 0; j < width; ++j)
            I_sum += static_cast<double>(confidencePreScan_[i][j]);

        double yc = 0.0;
        for (unsigned int x = 0; x < height; ++x)
          for (unsigned int y = 0; y < width; ++y) {
            yc += y * confidencePreScan_[x][y];
          }
        yc /= I_sum;

        // Angle of the confidence barycenter w.r.t. the probe axis.
        // NOTE(review): this definition of tc was lost in extraction;
        // reconstructed from the probe geometry accessors — confirm.
        double tc = yc * confidencePreScan_.getScanLinePitch() - confidencePreScan_.getFieldOfView() / 2.0;

        plot.plot(0, 0, time, xtarget);
        plot.plot(1, 0, time, vpMath::deg(tc));

        // Control law shared with the control thread: center the target
        // laterally while rotating the probe towards the confidence barycenter.
        {
          vpMutex::vpScopedLock lock(s_mutex_control_velocity);
          double lambda_t = 0.8; // target-centering gain
          double lambda_c = 0.8; // confidence-alignment gain
          s_controlVelocity = 0.0;
          s_controlVelocity[1] = -lambda_t * xtarget + lambda_c * (tc - ttarget) * ytarget;
          s_controlVelocity[3] = lambda_c * (tc - ttarget);
        }
      }

      // Lazily create the displays once the first images exist.
      if (!display_initialized_) {
#if defined(VISP_HAVE_X11)
        unsigned int xpos = 10;
        dpost_scan_ = new vpDisplayX(postScan_, xpos, 10, "post-scan");
        xpos += 300;
        dpost_scan_confidence_ = new vpDisplayX(confidencePostScan_, xpos, 10, "post-scan confidence");
        display_initialized_ = true;
#endif
      }

      vpDisplay::display(postScan_);
      // Draw the (possibly oriented) ROI as four segments.
      if (rectangleRoiDefined) {
        vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getTopLeft().get_i()),
                               static_cast<int>(rectangle.getTopLeft().get_j()),
                               static_cast<int>(rectangle.getBottomLeft().get_i()),
                               static_cast<int>(rectangle.getBottomLeft().get_j()), vpColor::red);
        vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getBottomLeft().get_i()),
                               static_cast<int>(rectangle.getBottomLeft().get_j()),
                               static_cast<int>(rectangle.getBottomRight().get_i()),
                               static_cast<int>(rectangle.getBottomRight().get_j()), vpColor::red);
        vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getBottomRight().get_i()),
                               static_cast<int>(rectangle.getBottomRight().get_j()),
                               static_cast<int>(rectangle.getTopRight().get_i()),
                               static_cast<int>(rectangle.getTopRight().get_j()), vpColor::red);
        vpDisplay::displayLine(postScan_, static_cast<int>(rectangle.getTopRight().get_i()),
                               static_cast<int>(rectangle.getTopRight().get_j()),
                               static_cast<int>(rectangle.getTopLeft().get_i()),
                               static_cast<int>(rectangle.getTopLeft().get_j()), vpColor::red);
      }
      vpDisplay::display(confidencePostScan_);

      // First click in the post-scan view defines the ROI center (80x50, axis-aligned).
      vpDisplay::displayText(postScan_, 10, 10, "Click to define ROI...", vpColor::red);
      if (vpDisplay::getClick(postScan_, ROI_centralPoint, false)) {
        if (stateROI == ROI_waiting) {
          rectangle.setCenter(vpImagePoint(ROI_centralPoint.get_v(), ROI_centralPoint.get_u()));
          rectangle.setSize(80, 50);
          rectangle.setOrientation(0);
          rectangleRoiDefined = true;
        }
      }

      // A click in the confidence view requests a global stop.
      vpDisplay::displayText(confidencePostScan_, 10, 10, "Click to exit...", vpColor::red);
      if (vpDisplay::getClick(confidencePostScan_, false)) {
        vpMutex::vpScopedLock lock(s_mutex_capture);
        s_capture_state = capture_stopped;
      }
      vpDisplay::flush(postScan_);
      vpDisplay::flush(confidencePostScan_);
    } else {
      vpTime::wait(2); // no frame yet: avoid busy-waiting
    }
  } while (capture_state_ != capture_stopped);

#if defined(VISP_HAVE_X11)
  delete dpost_scan_;
  delete dpost_scan_confidence_;
#endif
  std::cout << "End of display thread" << std::endl;
  return 0;
}
// Control thread: hybrid force / vision velocity control of the Viper850
// carrying the ultrasound probe. Regulates the contact force along the probe
// axis while applying the tracking/confidence velocities computed by the
// display thread (read from s_controlVelocity).
vpThread::Return controlFunction(vpThread::Args args)
{
  (void)args;
  vpRobotViper850 robot;

  vpMatrix eJe; // robot Jacobian expressed in the end-effector frame

  // End-effector to force/torque-sensor transformation.
  vpHomogeneousMatrix eMs;
  eMs[2][3] = -0.062; // sensor offset along z [m]

  vpHomogeneousMatrix sMp; // sensor to probe-contact-point frame
  vpHomogeneousMatrix sMe;
  eMs.inverse(sMe);
  vpVelocityTwistMatrix sVe;
  sVe.buildFrom(sMe);

  vpColVector sHs(6);      // measured wrench, sensor frame
  vpColVector sHs_star(6); // desired wrench, sensor frame (unused here — TODO confirm)
  vpColVector pHp_star(6); // desired wrench at the probe contact point
  vpColVector gHg(6);      // gravity wrench of the carried tool
  vpMatrix lambda(6, 6);   // diagonal force-control gains
  vpTranslationVector stg; // center-of-gravity position, sensor frame
  vpColVector sHs_bias(6); // sensor bias captured at the first iteration
  vpColVector v_s(6);      // velocity output of the force law, sensor frame
  vpColVector q_dot(6);    // joint velocity command

  lambda = 0;
  for (int i = 0; i < 3; i++)
    lambda[i][i] = 0.001; // force gains
  for (int i = 3; i < 6; i++)
    lambda[i][i] = 0.005; // torque gains

  // Desired contact force: 3 N pushing along the probe z axis.
  pHp_star = 0;
  pHp_star[2] = 3;

  sMp[2][3] = 0.262;                // probe contact point offset along z [m]
  gHg[2] = -(0.696 + 0.476) * 9.81; // weight of the carried masses [N]
  // Center of gravity expressed in the sensor frame.
  stg[0] = 0;
  stg[1] = 0;
  stg[2] = 0.088;

  vpRotationMatrix sRp;
  sMp.extract(sRp);
  vpTranslationVector stp;
  sMp.extract(stp);

  vpHomogeneousMatrix eMp = eMs * sMp;
  vpVelocityTwistMatrix eVp(eMp); // probe-frame to end-effector-frame twist

  // Current joint position, used to express the sensor pose in the fixed frame.
  vpColVector q;
  vpHomogeneousMatrix fMe;
  vpHomogeneousMatrix fMs;
  vpRotationMatrix sRf;
  robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
  robot.get_fMe(q, fMe);
  fMs = fMe * eMs;
  vpHomogeneousMatrix sMf;
  fMs.inverse(sMf);
  sMf.extract(sRf);

  // Force twist mapping the gravity wrench into the sensor frame; the
  // torque part is completed manually from the lever arm stg.
  vpForceTwistMatrix sFg(sMf);
  for (int i = 0; i < 3; i++)
    for (int j = 0; j < 3; j++)
      sFg[i + 3][j] = (stg.skew() * sRf)[i][j];

  // Force twist mapping wrenches from the probe frame to the sensor frame.
  vpForceTwistMatrix sFp(sMp);

  std::cout << "\nBias the force/torque sensor...\n " << std::endl;
  robot.biasForceTorqueSensor();
  robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

  int iter = 0;
  t_CaptureState capture_state_;
  t_stateInitROI ROI_state = ROI_waiting;

  // Block until the user has defined a ROI in the display thread, bailing
  // out early if the capture is stopped before any ROI exists.
  std::cout << "Waiting ROI definition to start control loop..." << std::endl;
  while (ROI_state == ROI_waiting) {
    vpMutex::vpScopedLock lock(s_mutex_ROI_state);
    ROI_state = s_ROI_state;
    s_mutex_capture.lock();
    capture_state_ = s_capture_state;
    s_mutex_capture.unlock();
    if (capture_state_ == capture_stopped) {
      std::cout << "end of control thread" << std::endl;
      return 0;
    }
  }
  std::cout << "Starting control loop..." << std::endl;
  do {
    s_mutex_capture.lock();
    capture_state_ = s_capture_state;
    s_mutex_capture.unlock();
    if (capture_state_ == capture_started) {
      // Measured wrench, negated so it expresses the action of the robot
      // on the environment.
      sHs = robot.getForceTorque();
      sHs *= -1;

      // Refresh the sensor pose and rebuild the gravity-compensation twist
      // for the current configuration.
      robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
      robot.get_fMe(q, fMe);
      fMs = fMe * eMs;
      fMs.inverse(sMf);
      sMf.extract(sRf);
      sFg.buildFrom(sMf);
      for (int i = 0; i < 3; i++)
        for (int j = 0; j < 3; j++)
          sFg[i + 3][j] = (stg.skew() * sRf)[i][j];

      // Record the residual sensor offset once, on the first iteration.
      if (iter == 0) {
        sHs_bias = sHs - sFg * gHg;
      }

      // Velocity computed by the display thread (target centering +
      // confidence alignment), expressed in the probe frame.
      vpColVector v_p;
      {
        vpMutex::vpScopedLock lock(s_mutex_control_velocity);
        v_p = s_controlVelocity;
      }
      // Keep only the components the vision law controls (y translation
      // and rotation about x); zero the rest.
      v_p[0] = 0;
      v_p[2] = 0;
      v_p[4] = 0;
      v_p[5] = 0;

      // Force law: drive the gravity-compensated, bias-corrected wrench
      // towards the desired contact wrench.
      v_s = lambda * (sFp * pHp_star - (sHs - sFg * gHg - sHs_bias));
      // Only the z translation (contact direction) is force-controlled.
      v_s[0] = 0.0;
      v_s[1] = 0.0;
      v_s[3] = 0.0;
      v_s[4] = 0.0;
      v_s[5] = 0.0;

      // Combine force and vision velocities in the end-effector frame,
      // map to joint space and send the command.
      vpVelocityTwistMatrix eVs;
      sVe.inverse(eVs);
      vpColVector v_e = eVs * v_s + eVp * v_p;
      robot.get_eJe(eJe);
      q_dot = eJe.pseudoInverse() * v_e;
      robot.setVelocity(vpRobot::ARTICULAR_FRAME, q_dot);
      iter++;
    }
    vpTime::wait(1); // ~1 kHz polling of the capture state
  } while (capture_state_ != capture_stopped);
  std::cout << "End of control thread" << std::endl;
  return 0;
}
int main(int argc, char **argv)
{
QApplication app(argc, argv);
s_robotIsRunning = false;
s_controlVelocity = vpColVector(6, 0);
vpThread thread_display((vpThread::Fn)displayFunction);
vpThread thread_control((vpThread::Fn)controlFunction);
bool stop_capture_ = false;
t_CaptureState capture_state_local = capture_waiting;
while (!stop_capture_) {
s_mutex_capture.lock();
capture_state_local = s_capture_state;
s_mutex_capture.unlock();
if (capture_state_local == capture_stopped)
stop_capture_ = true;
grabbedImage = qtGrabber->
acquire();
converter.
convert(*(grabbedImage), postScan_local);
s_mutex_image.lock();
s_frame = postScan_local;
s_mutex_image.unlock();
s_mutex_capture.lock();
s_capture_state = capture_started;
s_mutex_capture.unlock();
}
}
thread_display.join();
thread_control.join();
app.quit();
return 0;
}
#else
// Fallback main, built when one of the required dependencies (V4L2, pthread,
// Viper850 robot, UsTK grabber module) is unavailable: it only reports which
// dependency is missing.
int main()
{
#ifndef VISP_HAVE_V4L2
  std::cout << "You should enable V4L2 to make this example working..." << std::endl;
#elif !defined(_WIN32) && (defined(__unix__) || defined(__unix) || (defined(__APPLE__) && defined(__MACH__)))
  // Unix-like platform with V4L2 available: the missing piece is pthread.
  std::cout << "You should enable pthread usage and rebuild ViSP..." << std::endl;
#else
  std::cout << "Multi-threading seems not supported on this platform" << std::endl;
#endif
#ifndef VISP_HAVE_VIPER850
  std::cout << "You need viper 850 robot to run this example" << std::endl;
#endif
}
#endif
void update(const vpImage< unsigned char > &I)
Tracking method, to call at every new frame to track.
void init(const vpImage< unsigned char > &I, const vpRectOriented &R)
Initialisation of the tracker: call it to set the region to track (R) in the image (I) before starting to call update() on new frames.
vpRectOriented getTarget() const
To call after update() at each new frame, to get the position of the ROI in the last acquired frame.
Specific class to grab pre-scan frames from the ultrasound station on the network.
bool isFirstFrameAvailable()
usFrameGrabbedInfo< usImagePreScan2D< unsigned char > > * acquire()
bool initAcquisition(const usNetworkGrabber::usInitHeaderSent &header)
bool sendAcquisitionParameters()
void setMotorPosition(int motorPosition)
static void convert(const usImagePostScan2D< unsigned char > &image, const double &u, const double &v, double &x, double &y)
Conversion for 2D post-scan images.
void convert(const usImagePostScan2D< unsigned char > &imageToConvert, usImagePreScan2D< unsigned char > &imageConverted, int preScanSamples)
void convert(const usImagePreScan2D< unsigned char > &preScanImage, usImagePostScan2D< unsigned char > &postScanImage, double xResolution=0., double yResolution=0.)
Process a pre-scan image to determine the confidence map.
void run(usImagePreScan2D< unsigned char > &preScanConfidence, const usImagePreScan2D< unsigned char > &preScanImage)
double getFieldOfView() const
double getScanLinePitch() const