icub-client
faceTracker.cpp
Go to the documentation of this file.
1 /*
2 * Copyright (C) 2014 WYSIWYD Consortium, European Commission FP7 Project ICT-612139
3 * Authors: Hyung Jin Chang
4 * email: hj.chang@imperial.ac.uk
5 * website: http://wysiwyd.upf.edu/
6 * Permission is granted to copy, distribute, and/or modify this program
7 * under the terms of the GNU General Public License, version 2 or any
8 * later version published by the Free Software Foundation.
9 *
10 * A copy of the license can be found at
11 * icub-client/license/gpl.txt
12 *
13 * This program is distributed in the hope that it will be useful, but
14 * WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
16 * Public License for more details
17 */
18 
19 #include "faceTracker.h"
20 
21 using namespace yarp::os;
22 using namespace yarp::sig;
23 using namespace yarp::sig::draw;
24 using namespace yarp::sig::file;
25 using namespace yarp::dev;
26 using namespace std;
27 
28 /************************************************************************/
29 bool faceTrackerModule::configure(yarp::os::ResourceFinder &rf) {
30 
31  string moduleName = rf.check("name", Value("faceTracker"), "module name (string)").asString();
32 
33  setName(moduleName.c_str());
34 
35  string xmlPath = rf.findFileByName("haarcascade_frontalface_default.xml");
36 
37  string imagePortName = rf.check("imagePortName", Value("/icub/camcalib/left/out")).asString();
38 
39 
40  if (!handlerPort.open("/" + getName() + "/rpc")) {
41  yError() << getName() << ": Unable to open port " << handlerPort.getName();
42  return false;
43  }
44 
45  imagePortLeft.open("/" + getName() + "/image/in");
46 
47  while(!Network::connect(imagePortName, imagePortLeft.getName()))
48  {
49  Time::delay(3);
50  yDebug() << "try to connect left camera, please wait ...";
51  }
52 
53  Property options;
54  options.put("device", "remote_controlboard");
55  options.put("local", "/facetracker/motor/client");
56  options.put("remote", "/icub/head");
57 
58  robotHead = new PolyDriver(options);
59 
60  if(!robotHead->isValid())
61  {
62  yError() << "Cannot connect to the robot head";
63  return false;
64  }
65 
66  robotHead->view(vel);
67  robotHead->view(enc);
68  robotHead->view(ictrl);
69 
70  if(vel==nullptr || enc==nullptr || ictrl==nullptr)
71  {
72  yError() << "Cannot get interface to robot head";
73  robotHead->close();
74  return false;
75  }
76  nr_jnts = 0;
77  vel->getAxes(&nr_jnts);
78 
79  velocity_command.resize(nr_jnts);
80  cur_encoders.resize(nr_jnts);
81 
82  /* prepare command */
83  for(int i=0;i<nr_jnts;i++)
84  {
85  velocity_command[i] = 0;
86  }
87 
88  for(int i=0; i<nr_jnts; i++)
89  {
90  ictrl->setControlMode(i,VOCAB_CM_VELOCITY);
91  }
92 
93  // ==================================================================
95  cv::namedWindow("cvImage_Left",CV_WINDOW_NORMAL);
96 
97  // ==================================================================
98  // face detection configuration
99  face_classifier_left.load(xmlPath);
100 
101  attach(handlerPort); // attach to port
102 
103  // ==================================================================
104  // Parameters
105  counter_no_face_found = 0;
106  x_buf = 0;
107  y_buf = 0;
108 
109  mode = Mode::GO_DEFAULT;
110  setpos_counter = 0;
111  panning_counter = 0;
112  stuck_counter = 0;
113 
114  // ==================================================================
115  // random motion
116  tilt_target = 0;
117  pan_target = 0;
118 
119  int seed = 10000;
120  srand(seed);
121  pan_max = 80;
122  tilt_max = 20;
123 
124  cvIplImageLeft = nullptr;
125 
126  return true;
127 }
128 
129 /************************************************************************/
131  yInfo() << "Interrupting module for port cleanup";
132  handlerPort.interrupt();
133  return true;
134 }
135 
136 /************************************************************************/
138  yInfo() << "Calling close function";
139  handlerPort.interrupt();
140  handlerPort.close();
141 
142  cvReleaseImage(&cvIplImageLeft);
143  robotHead->close();
144 
145  return true;
146 }
147 
148 /************************************************************************/
149 bool faceTrackerModule::respond(const Bottle& command, Bottle& reply) {
150  string helpMessage = string(getName().c_str()) +
151  " commands are: \n" +
152  "help \n" +
153  "quit \n" ;
154 
155  reply.clear();
156 
157  if (command.get(0).asString()=="quit") {
158  reply.addString("quitting");
159  return false;
160  } else if (command.get(0).asString()=="help") {
161  yInfo() << helpMessage;
162  reply.addString("ok");
163  } else if (command.get(0).asString()=="pause") {
164  reply.addString("ack");
165  mode = Mode::PAUSED;
166  } else if (command.get(0).asString()=="resume") {
167  reply.addString("ack");
168  mode = Mode::FACE_SEARCHING;
169  }
170 
171  return true;
172 }
173 
174 /************************************************************************/
176  return 0.03;
177 }
178 
180  // Going to the set position mode
181  if (setpos_counter < 100)
182  {
183  velocity_command[0] = (0-cur_encoders[0])*0.3; // common tilt of head
184  velocity_command[1] = (0-cur_encoders[1])*0.3; // common roll of head
185  velocity_command[2] = (0-cur_encoders[2])*0.3; // common pan of head
186  velocity_command[3] = (0-cur_encoders[3])*0.3; // common tilt of eyes
187  velocity_command[4] = (0-cur_encoders[4])*0.3; // common pan of eyes
188 
189  setpos_counter++;
190  }
191  else
192  {
193  yDebug() << "Going to the set position is DONE!";
194 
195  velocity_command[0] = 0;
196  velocity_command[2] = 0;
197  velocity_command[3] = 0;
198  velocity_command[4] = 0;
199 
200  yInfo() << "Switch to face searching mode!";
201  mode = Mode::FACE_SEARCHING;
202  setpos_counter = 0;
203  }
204  vel->velocityMove(velocity_command.data());
205 }
206 
207 void faceTrackerModule::faceSearching(bool face_found) {
208  if(face_found > 0)
209  {
210  yInfo() << "I found a face! Switch to face tracking mode";
211  mode = Mode::FACE_TRACKING;
212  panning_counter++;
213  }
214  else
215  {
216  velocity_command[0] = (tilt_target-cur_encoders[0])*0.3; // common tilt of head
217  velocity_command[1] = (0-cur_encoders[1])*0.3; // common roll of head
218  velocity_command[2] = (pan_target-cur_encoders[2])*0.3; // common pan of head
219  velocity_command[3] = (0-cur_encoders[3])*0.3; // common tilt of eyes
220  velocity_command[4] = (0-cur_encoders[4])*0.3; // common pan of eyes
221 
222  if ((abs(tilt_target - cur_encoders[0]) < 1) && (abs(pan_target - cur_encoders[2]) < 1))
223  {
224  double pan_r = ((double)rand() / ((double)(RAND_MAX)+(double)(1)));
225  double tilt_r = ((double)rand() / ((double)(RAND_MAX)+(double)(1)));
226 
227  pan_target = (int)((pan_r*pan_max)-(pan_max/2));
228  tilt_target = (int)((tilt_r*tilt_max)-(tilt_max/2));
229  }
230  }
231 
232  vel->velocityMove(velocity_command.data());
233 }
234 
235 void faceTrackerModule::faceTracking(const std::vector<cv::Rect> &faces_left, int biggest_face_left_idx) {
236  if(faces_left.size() > 0)
237  {
238  double x = 320-(faces_left[biggest_face_left_idx].x + faces_left[biggest_face_left_idx].width/2);
239  double y = 240-(faces_left[biggest_face_left_idx].y + faces_left[biggest_face_left_idx].height/2);
240 
241  x -= 320/2;
242  y -= 240/2;
243 
244  double vx = x*0.3; // Not to move too fast
245  double vy = y*0.3;
246 
247  /* prepare command */
248  for(int i=0;i<nr_jnts;i++)
249  {
250  velocity_command[i] = 0;
251  }
252 
253  velocity_command[0] = vy; // common tilt of head
254  velocity_command[2] = vx; // common pan of head
255  velocity_command[3] = vy; // common tilt of eyes
256  velocity_command[4] = -vx; // common pan of eyes
257 
258  x_buf = x;
259  y_buf = y;
260  counter_no_face_found = 0;
261 
262  // stopping smoothly
263  }
264  else if (faces_left.size() == 0 && counter_no_face_found < 10)
265  {
266  double vx = x_buf*0.3; // Not to move too fast
267  double vy = y_buf*0.3;
268 
269  /* prepare command */
270  for(int i=0;i<nr_jnts;i++)
271  {
272  velocity_command[i] = 0;
273  }
274 
275  velocity_command[0] = vy; // common tilt of head
276  velocity_command[2] = vx; // common pan of head
277  velocity_command[3] = vy; // common tilt of eyes
278  velocity_command[4] = -vx; // common pan of eyes
279 
280  counter_no_face_found++;
281  }
282  else // faces_left.size() == 0 && counter>=10
283  {
284  yInfo() << "Hey! I don't see any face.";
285 
286  velocity_command[0] = 0;
287  velocity_command[2] = 0;
288  velocity_command[3] = 0;
289  velocity_command[4] = 0;
290 
291  Time::delay(0.3);
292 
293  stuck_counter++;
294 
295  if(stuck_counter >= 10)
296  {
297  if(panning_counter > 5)
298  {
299  yDebug() << "Switch to default position mode!";
300  mode = Mode::GO_DEFAULT;
301  stuck_counter = 0;
302  }
303  else
304  {
305  yDebug() << "Switch to face searching mode!";
306  mode = Mode::FACE_SEARCHING;
307  stuck_counter = 0;
308  }
309  }
310  else { // for now, stay in face tracking mode
311  mode = Mode::FACE_TRACKING;
312  }
313  }
314  vel->velocityMove(velocity_command.data());
315 }
316 
317 int faceTrackerModule::getBiggestFaceIdx(const cv::Mat& cvMatImageLeft, const std::vector<cv::Rect> &faces_left) {
318  int biggest_face_left_idx = 0;
319  int biggest_face_left_size_buf = 0;
320 
321  for(unsigned int i=0; i<faces_left.size(); i++)
322  {
323  cv::Point flb(faces_left[i].x + faces_left[i].width,
324  faces_left[i].y + faces_left[i].height);
325  cv::Point ftr(faces_left[i].x, faces_left[i].y);
326 
327  cv::rectangle(cvMatImageLeft, flb, ftr, cv::Scalar(0,255,0), 3,4,0);
328 
329  if(biggest_face_left_size_buf < faces_left[i].height)
330  {
331  biggest_face_left_size_buf = faces_left[i].height;
332  biggest_face_left_idx = i;
333  }
334  }
335  return biggest_face_left_idx;
336 }
337 
338 
339 /***************************************************************************/
341  ImageOf<PixelRgb> *yarpImageLeft = imagePortLeft.read();
342 
343  if ( cvIplImageLeft == nullptr ) {
344  cvIplImageLeft = cvCreateImage(cvSize(yarpImageLeft->width(),yarpImageLeft->height()), IPL_DEPTH_8U,3);
345  }
346 
347  // convert to cv::Mat
348  cvCvtColor((IplImage*)yarpImageLeft->getIplImage(), cvIplImageLeft, CV_RGB2BGR);
349  cv::Mat cvMatImageLeft=cv::cvarrToMat(cvIplImageLeft);
350 
351  if(yarpImageLeft!=nullptr)
352  {
353  // resize images (downsample to 320x240)
354  cv::resize(cvMatImageLeft, cvMatImageLeft, cv::Size(320, 240), 0,0,CV_INTER_NN);
355 
356  // convert captured frame to gray scale & equalize histogram
357  cv::Mat cvMatGrayImageLeft;
358  cv::cvtColor(cvMatImageLeft, cvMatGrayImageLeft, CV_BGR2GRAY);
359  cv::equalizeHist(cvMatGrayImageLeft, cvMatGrayImageLeft);
360 
361  // face detection routine
362 
363  std::vector<cv::Rect> faces_left; // a vector array to store the face found
364 
365  face_classifier_left.detectMultiScale(cvMatGrayImageLeft, faces_left,
366  1.1, // increase search scale by 10% each pass
367  3, // merge groups of three detections
368  CV_HAAR_DO_CANNY_PRUNING,
369  cv::Size(30,30),
370  cv::Size(50,50));
371 
372  int biggest_face_left_idx = getBiggestFaceIdx(cvMatImageLeft, faces_left);
373 
374  yarp::sig::VectorOf<double> new_encoder_readings;
375  new_encoder_readings.resize(nr_jnts);
376 
377  if(enc->getEncoders(new_encoder_readings.data())) {
378  cur_encoders = new_encoder_readings;
379  }
380 
381  if (mode == Mode::GO_DEFAULT) {
382  moveToDefaultPosition();
383  } else if (mode == Mode::FACE_SEARCHING) {
384  faceSearching(faces_left.size());
385  } else if (mode == Mode::FACE_TRACKING) {
386  faceTracking(faces_left, biggest_face_left_idx);
387  } else if (mode == Mode::PAUSED) {
388  yInfo() << "Not doing anything as module is paused";
389  yarp::os::Time::delay(0.5);
390  }
391  }
392 
393  return true;
394 }
bool configure(yarp::os::ResourceFinder &rf)
configure all the module parameters and return true if successful
Definition: faceTracker.cpp:29
bool updateModule()
get an image from the robot's camera and decide which mode should be chosen next
bool respond(const yarp::os::Bottle &command, yarp::os::Bottle &reply)
void moveToDefaultPosition()
Move to center position (after being stuck)
STL namespace.
void faceSearching(bool face_found)
Randomly move around.
bool close()
close and shut down the module
bool interruptModule()
interrupt the ports
void faceTracking(const std::vector< cv::Rect > &faces_left, int biggest_face_left_idx)
If a face was found, track it.