// --- configure() fragment (original lines 31..124) ---
// Sets up the module: reads parameters, opens ports, connects to the
// camera stream and the head motor board, loads the face cascade.
// NOTE(review): this extract is garbled (statements wrapped, some original
// lines / braces stripped); comments only annotate the visible code.
// Module name, overridable with --name (default "faceTracker").
31 string moduleName = rf.check(
"name", Value(
"faceTracker"),
"module name (string)").asString();
33 setName(moduleName.c_str());
// Locate the Haar cascade file through the ResourceFinder search path.
35 string xmlPath = rf.findFileByName(
"haarcascade_frontalface_default.xml");
// Source port of the (left) camera images, overridable with --imagePortName.
37 string imagePortName = rf.check(
"imagePortName", Value(
"/icub/camcalib/left/out")).asString();
// RPC handler port "/<name>/rpc"; on failure only logs here — presumably
// returns false on a stripped line, TODO confirm against full source.
40 if (!handlerPort.open(
"/" + getName() +
"/rpc")) {
41 yError() << getName() <<
": Unable to open port " << handlerPort.getName();
// Input image port "/<name>/image/in".
45 imagePortLeft.open(
"/" + getName() +
"/image/in");
// Busy-wait until the camera port is up; loop body (stripped lines 48-49)
// presumably sleeps between attempts — verify, otherwise this spins hot.
47 while(!Network::connect(imagePortName, imagePortLeft.getName()))
50 yDebug() <<
"try to connect left camera, please wait ...";
// Remote control board client for the iCub head joints.
54 options.put(
"device",
"remote_controlboard");
55 options.put(
"local",
"/facetracker/motor/client");
56 options.put(
"remote",
"/icub/head");
// Owning raw pointer; presumably deleted in close() — TODO confirm
// (a unique_ptr would be safer).
58 robotHead =
new PolyDriver(options);
60 if(!robotHead->isValid())
62 yError() <<
"Cannot connect to the robot head";
// Acquire interfaces; vel/enc views happen on stripped lines 66-67,
// as implied by the nullptr checks below.
68 robotHead->view(ictrl);
70 if(vel==
nullptr || enc==
nullptr || ictrl==
nullptr)
72 yError() <<
"Cannot get interface to robot head";
// Size the command/encoder vectors to the number of head joints.
77 vel->getAxes(&nr_jnts);
79 velocity_command.resize(nr_jnts);
80 cur_encoders.resize(nr_jnts);
// Start with zero velocity on every joint.
83 for(
int i=0;i<nr_jnts;i++)
85 velocity_command[i] = 0;
// Put every joint into velocity control mode.
88 for(
int i=0; i<nr_jnts; i++)
90 ictrl->setControlMode(i,VOCAB_CM_VELOCITY);
// Debug window; CV_WINDOW_NORMAL is a legacy OpenCV 2/3 constant
// (cv::WINDOW_NORMAL in modern API).
95 cv::namedWindow(
"cvImage_Left",CV_WINDOW_NORMAL);
// NOTE(review): load() returns bool; the result is not checked in the
// visible lines — a missing cascade would fail silently here.
99 face_classifier_left.load(xmlPath);
105 counter_no_face_found = 0;
// Start by driving the head to its default position.
109 mode = Mode::GO_DEFAULT;
// Lazily allocated on first updateModule() call; released in close().
124 cvIplImageLeft =
nullptr;
// --- interruptModule() fragment (original lines 131..132) ---
// Interrupt blocking port operations so close() can proceed.
131 yInfo() <<
"Interrupting module for port cleanup";
132 handlerPort.interrupt();
// NOTE(review): imagePortLeft is not interrupted in the visible lines —
// verify against the full source; a blocked imagePortLeft.read() in
// updateModule() could otherwise stall shutdown.
// --- close() fragment (original lines 138..142) ---
// Shut the module down and release resources.
138 yInfo() <<
"Calling close function";
// NOTE(review): this repeats handlerPort.interrupt(), already done in
// interruptModule(); it likely should be handlerPort.close(). Original
// lines 140-141 are missing from this extract — confirm the port (and
// imagePortLeft) are actually closed there before changing anything.
139 handlerPort.interrupt();
// cvReleaseImage tolerates a null pointer, so this is safe even if
// updateModule() never allocated the buffer.
142 cvReleaseImage(&cvIplImageLeft);
// --- respond() fragment (original lines 150..168) ---
// RPC handler: dispatch on the first string of the incoming Bottle.
// Supported commands: quit, help, pause, resume.
150 string helpMessage = string(getName().c_str()) +
151 " commands are: \n" +
// "quit": acknowledge; module termination presumably happens on a
// stripped line (159) — TODO confirm (usually `return false`).
157 if (command.get(0).asString()==
"quit") {
158 reply.addString(
"quitting");
160 }
// "help": log the command list and acknowledge.
else if (command.get(0).asString()==
"help") {
161 yInfo() << helpMessage;
162 reply.addString(
"ok");
163 }
// "pause": ack; original line 165 is missing here — presumably it sets
// mode = Mode::PAUSED (mirroring the resume branch). Verify.
else if (command.get(0).asString()==
"pause") {
164 reply.addString(
"ack");
166 }
// "resume": ack and restart the search state machine.
else if (command.get(0).asString()==
"resume") {
167 reply.addString(
"ack");
168 mode = Mode::FACE_SEARCHING;
// --- moveToDefaultPosition() fragment (original lines 181..204) ---
// Proportional control (gain 0.3) driving head joints 0-4 toward encoder
// position 0 for up to 100 iterations (setpos_counter increment is on a
// stripped line), then hands over to FACE_SEARCHING.
181 if (setpos_counter < 100)
183 velocity_command[0] = (0-cur_encoders[0])*0.3;
184 velocity_command[1] = (0-cur_encoders[1])*0.3;
185 velocity_command[2] = (0-cur_encoders[2])*0.3;
186 velocity_command[3] = (0-cur_encoders[3])*0.3;
187 velocity_command[4] = (0-cur_encoders[4])*0.3;
// Done: stop the joints and switch mode.
193 yDebug() <<
"Going to the set position is DONE!";
// NOTE(review): velocity_command[1] is not zeroed here (only 0,2,3,4),
// unlike the branch above which commands all five joints — looks like an
// off-by-one omission; confirm it is intentional.
195 velocity_command[0] = 0;
196 velocity_command[2] = 0;
197 velocity_command[3] = 0;
198 velocity_command[4] = 0;
200 yInfo() <<
"Switch to face searching mode!";
201 mode = Mode::FACE_SEARCHING;
// Send whichever command the branch above prepared.
204 vel->velocityMove(velocity_command.data());
// --- faceSearching() fragment (original lines 210..232) ---
// If a face was detected this cycle, switch to tracking; otherwise drive
// the head (P-control, gain 0.3) toward random pan/tilt targets, picking
// new targets once both axes are within 1 degree of the current ones.
210 yInfo() <<
"I found a face! Switch to face tracking mode";
211 mode = Mode::FACE_TRACKING;
// Joint 0 = tilt, joint 2 = pan; joints 1,3,4 are held at 0.
216 velocity_command[0] = (tilt_target-cur_encoders[0])*0.3;
217 velocity_command[1] = (0-cur_encoders[1])*0.3;
218 velocity_command[2] = (pan_target-cur_encoders[2])*0.3;
219 velocity_command[3] = (0-cur_encoders[3])*0.3;
220 velocity_command[4] = (0-cur_encoders[4])*0.3;
// NOTE(review): arguments are double — make sure std::abs(double) is
// selected here, not C abs(int), which would truncate.
222 if ((abs(tilt_target - cur_encoders[0]) < 1) && (abs(pan_target - cur_encoders[2]) < 1))
// Uniform random draw in [0,1); rand() is legacy — <random> would be
// preferable, but behavior is kept as-is.
224 double pan_r = ((double)rand() / ((double)(RAND_MAX)+(double)(1)));
225 double tilt_r = ((double)rand() / ((double)(RAND_MAX)+(double)(1)));
// New targets in [-max/2, +max/2] degrees.
227 pan_target = (int)((pan_r*pan_max)-(pan_max/2));
228 tilt_target = (int)((tilt_r*tilt_max)-(tilt_max/2));
232 vel->velocityMove(velocity_command.data());
// --- faceTracking() fragment (original lines 236..314) ---
// Center the biggest detected face in the image by commanding eye
// (joints 3,4) and neck (joints 0,2) velocities; if the face is lost,
// keep the last command briefly, then stop and fall back to searching.
236 if(faces_left.size() > 0)
// Pixel error of the face center. NOTE(review): the image was resized to
// 320x240 in updateModule(), whose center is (160,120) — using 320/240
// here measures offset from the bottom-right corner, not the center.
// Verify against the unabridged source (stripped lines 240-247 compute
// vx/vy and the x_buf/y_buf history from these values).
238 double x = 320-(faces_left[biggest_face_left_idx].x + faces_left[biggest_face_left_idx].width/2);
239 double y = 240-(faces_left[biggest_face_left_idx].y + faces_left[biggest_face_left_idx].height/2);
// Reset all joints, then command only the tracking joints.
248 for(
int i=0;i<nr_jnts;i++)
250 velocity_command[i] = 0;
253 velocity_command[0] = vy;
254 velocity_command[2] = vx;
255 velocity_command[3] = vy;
256 velocity_command[4] = -vx;
260 counter_no_face_found = 0;
// Face briefly lost: coast on the buffered error (x_buf/y_buf hold the
// last seen offset) for up to 10 cycles.
264 else if (faces_left.size() == 0 && counter_no_face_found < 10)
266 double vx = x_buf*0.3;
267 double vy = y_buf*0.3;
270 for(
int i=0;i<nr_jnts;i++)
272 velocity_command[i] = 0;
275 velocity_command[0] = vy;
276 velocity_command[2] = vx;
277 velocity_command[3] = vy;
278 velocity_command[4] = -vx;
280 counter_no_face_found++;
// Face lost for good: stop the tracking joints.
284 yInfo() <<
"Hey! I don't see any face.";
286 velocity_command[0] = 0;
287 velocity_command[2] = 0;
288 velocity_command[3] = 0;
289 velocity_command[4] = 0;
// Anti-stuck logic: after repeated stalls, either go back to the default
// pose (if we already panned around several times) or resume searching.
295 if(stuck_counter >= 10)
297 if(panning_counter > 5)
299 yDebug() <<
"Switch to default position mode!";
300 mode = Mode::GO_DEFAULT;
305 yDebug() <<
"Switch to face searching mode!";
306 mode = Mode::FACE_SEARCHING;
311 mode = Mode::FACE_TRACKING;
314 vel->velocityMove(velocity_command.data());
317 int faceTrackerModule::getBiggestFaceIdx(
const cv::Mat& cvMatImageLeft,
const std::vector<cv::Rect> &faces_left) {
318 int biggest_face_left_idx = 0;
319 int biggest_face_left_size_buf = 0;
321 for(
unsigned int i=0; i<faces_left.size(); i++)
323 cv::Point flb(faces_left[i].
x + faces_left[i].width,
324 faces_left[i].
y + faces_left[i].height);
325 cv::Point ftr(faces_left[i].
x, faces_left[i].
y);
327 cv::rectangle(cvMatImageLeft, flb, ftr, cv::Scalar(0,255,0), 3,4,0);
329 if(biggest_face_left_size_buf < faces_left[i].height)
331 biggest_face_left_size_buf = faces_left[i].height;
332 biggest_face_left_idx = i;
335 return biggest_face_left_idx;
// --- updateModule() fragment (original lines 341..389) ---
// Per-cycle pipeline: grab a frame, detect faces, read encoders, then
// dispatch to the handler for the current mode.
// Blocking read of the next camera frame.
341 ImageOf<PixelRgb> *yarpImageLeft = imagePortLeft.read();
// Lazily allocate the conversion buffer on the first frame.
// BUG(review): yarpImageLeft is dereferenced here (lines 344/348) but only
// checked against nullptr at line 351 below — read() returns nullptr when
// the port is interrupted, so the null check must come first.
343 if ( cvIplImageLeft ==
nullptr ) {
344 cvIplImageLeft = cvCreateImage(cvSize(yarpImageLeft->width(),yarpImageLeft->height()), IPL_DEPTH_8U,3);
// Convert YARP's RGB frame to OpenCV's BGR order (legacy C API).
348 cvCvtColor((IplImage*)yarpImageLeft->getIplImage(), cvIplImageLeft, CV_RGB2BGR);
// Wrap the IplImage in a cv::Mat header (no copy).
349 cv::Mat cvMatImageLeft=cv::cvarrToMat(cvIplImageLeft);
351 if(yarpImageLeft!=
nullptr)
// Downscale in place to 320x240 with nearest-neighbor interpolation
// (CV_INTER_NN is the legacy name of cv::INTER_NEAREST).
354 cv::resize(cvMatImageLeft, cvMatImageLeft, cv::Size(320, 240), 0,0,CV_INTER_NN);
// Grayscale + histogram equalization, as the cascade expects.
357 cv::Mat cvMatGrayImageLeft;
358 cv::cvtColor(cvMatImageLeft, cvMatGrayImageLeft, CV_BGR2GRAY);
359 cv::equalizeHist(cvMatGrayImageLeft, cvMatGrayImageLeft);
// Run the Haar cascade (scale/minNeighbors arguments are on stripped
// lines); CV_HAAR_DO_CANNY_PRUNING skips low-edge regions.
363 std::vector<cv::Rect> faces_left;
365 face_classifier_left.detectMultiScale(cvMatGrayImageLeft, faces_left,
368 CV_HAAR_DO_CANNY_PRUNING,
// Draw all detections and pick the one to track.
372 int biggest_face_left_idx = getBiggestFaceIdx(cvMatImageLeft, faces_left);
// Refresh cur_encoders only when the read succeeds, so a failed read
// keeps the previous values instead of garbage.
374 yarp::sig::VectorOf<double> new_encoder_readings;
375 new_encoder_readings.resize(nr_jnts);
377 if(enc->getEncoders(new_encoder_readings.data())) {
378 cur_encoders = new_encoder_readings;
// State-machine dispatch on the current mode.
381 if (mode == Mode::GO_DEFAULT) {
382 moveToDefaultPosition();
383 }
else if (mode == Mode::FACE_SEARCHING) {
384 faceSearching(faces_left.size());
385 }
else if (mode == Mode::FACE_TRACKING) {
386 faceTracking(faces_left, biggest_face_left_idx);
387 }
// PAUSED: idle with a short delay to avoid spinning.
else if (mode == Mode::PAUSED) {
388 yInfo() <<
"Not doing anything as module is paused";
389 yarp::os::Time::delay(0.5);
bool configure(yarp::os::ResourceFinder &rf)
configure all the module parameters and return true if successful
bool updateModule()
get an image from the robot's camera and decide which mode should be chosen next
bool respond(const yarp::os::Bottle &command, yarp::os::Bottle &reply)
void moveToDefaultPosition()
Move to center position (after being stuck)
void faceSearching(bool face_found)
Move the head toward random pan/tilt targets until a face is found, then switch to tracking mode.
bool close()
close and shut down the module
bool interruptModule()
interrupt the ports
void faceTracking(const std::vector< cv::Rect > &faces_left, int biggest_face_left_idx)
If a face was found, track it.