// icub-basic-demos -- pf3dTracker.cpp
14 // NOTES:
// distances are treated as in millimeters inside the program, but they are converted to meters when communicating to the outside.
16 // likelihood is normalized only when communicated to the outside.
17 
18 // TODO
19 //
20 // 0. make things compile and build.
21 //
22 // 1. test that the function that computes the histogram from the RGB image works.
23 //
24 // 1. check that things behave properly
25 //
26 // 2. make sure randomization is really random.
27 //
28 // 3. remove stuff that is not used
29 //
30 // 4. make the code check the return values (ROBUSTNESS/DEBUGGABILITY).
31 //
32 // 5. try to optimize the code, make it faster.
33 //
34 
35 // X. fix the memory leak. DONE
36 
// does the RESAMPLING have problems???: typically the particle matrix is full of n-a-n values after resampling when few particles are used (I have not tried with many particles to see what the output looks like).
// with 5000 particles everything seems to work...
39 
40 //things to be done:
41 
42 //1. when the ball is moving towards the camera, the tracker lags behind it... is this because particles that are nearer than the true value to the camera have zero likelihood while the ones that are farther have higher likelihood values? how could i test this? plotting the positions of the particles? how could i solve this?
43 
44 //2. check the resampling function: sometimes when the image goes blank the tracker programs quits suddenly. it's probably that function trying to write a particle value in an area where it should not (array overflow).
45 
46 // measure which parts of the tracking loop are using up processing time.
47 
48 //WISHLIST:
49 //turn all static arrays into pointers to pointers, so that their size can be chosen at run time (through the configuration file)
50 //remove unnecessary included headers and namespaces.
51 //comment the whole project in doxygen style.
52 //create a pair of support files (.hpp and .cpp) specifically for this project.
53 //create a variable to contain the parameters for the motion model
54 //the size of the image is not a parameter.
55 //there are some functions which have among their parameters "inside_outside_...". this should be fixed, as this value is now defined at compile time.
56 
57 //DONE:
58 //fixed: memory leak. when is the memory allocated by OpenCV freed? never? take a look at the small functions I call repeatedly: they might be the cause for the memory leak. use: cvReleaseImage.
59 //the images are visualized through opencv: change this to the output video port.
60 //the output data port is not used so far: it should stream the numeric values of the estimates.
61 //fixed the seg-fault problem. s-f happen when stdDev is high (or at the beginning of a tracking)... seems like a problem in the resampling algorithm.
//fixed. it was a problem in the randomization that was not really random. 1. check the resampling algorithm: when the acceleration error is low, the tracker starts with particles in 0,0,0 and stays there.
63 //maybe this is due to the fact that the first images the tracker receives are meaningless, the likelihood is quite low, so the particles are spread a lot and one happens to be near the origin. all the particles are concentrated there and if you don't have a high acceleration noise you get stuck there.
64 //done. write particles status/likelihood on the particleOutputPort, for visualization purposes.
65 // write a client that reads this data and plots particles.
66 
67 #include <cmath>
68 #include <ctime>
69 #include <fstream>
70 #include <iomanip>
71 #include <iostream>
72 #include <sstream>
73 #include <utility>
74 
75 #include <opencv2/highgui/highgui.hpp>
76 #include <opencv2/core/core_c.h>
77 
78 #include <yarp/os/Log.h>
79 #include <yarp/os/LogStream.h>
80 #include <yarp/os/ResourceFinder.h>
81 #include <yarp/cv/Cv.h>
82 
83 #include <iCub/pf3dTracker.hpp>
84 
85 using namespace std;
86 using namespace yarp::os;
87 using namespace yarp::sig;
88 using namespace yarp::cv;
89 
90 void printMat(CvMat* A);
91 
92 //constructor
93 PF3DTracker::PF3DTracker()
94 {
95 }
96 
97 //destructor
98 PF3DTracker::~PF3DTracker()
99 {
100 }
101 
102 //member function that set the object up.
103 bool PF3DTracker::configure(ResourceFinder &rf)
104 {
105  _doneInitializing=false;
106 
107  bool failure;
108  bool quit;
109  string trackedObjectColorTemplate;
110  string dataFileName;
111  string trackedObjectShapeTemplate;
112  string motionModelMatrix;
113  string temp;
114  int row, column;
115  double widthRatio, heightRatio;
116 
117  quit=false;
118  _saveImagesWithOpencv=false;
119 
120  _lut = new Lut[256*256*256];
121  //create the look up table lut
122  //this shouldn't be done every time
123  //TOBEDONE: write the lut on a file and read it back.
124  fillLut(_lut);
125 
126  srand((unsigned int)time(0)); //make sure random numbers are really random.
127  rngState = cvRNG(rand());
128 
129  //allocate some memory and initialize some data structures for colour histograms.
130  int dimensions;
131  dimensions=3;
132  int sizes[3]={YBins,UBins,VBins};
133  _modelHistogramMat=cvCreateMatND(dimensions, sizes, CV_32FC1);
134  if(_modelHistogramMat==0)
135  {
136  yWarning("PF3DTracker::open - I wasn\'t able to allocate memory for _modelHistogramMat.");
137  quit =true;
138  }
139  _innerHistogramMat=cvCreateMatND(dimensions, sizes, CV_32FC1);
140  if(_innerHistogramMat==0)
141  {
142  yWarning("PF3DTracker::open - I wasn\'t able to allocate memory for _innerHistogramMat.");
143  quit =true;
144  }
145  _outerHistogramMat=cvCreateMatND(dimensions, sizes, CV_32FC1);
146  if(_outerHistogramMat==0)
147  {
148  yWarning("PF3DTracker::open - I wasn\'t able to allocate memory for _outerHistogramMat.");
149  quit =true;
150  }
151 
152  _model3dPointsMat=cvCreateMat(3, 2*nPixels, CV_32FC1);
153  if(_model3dPointsMat==0)
154  {
155  yWarning("PF3DTracker::open - I wasn\'t able to allocate memory for _model3dPointsMat.");
156  quit =true;
157  }
158  _points2Mat=cvCreateMat(3, 2*nPixels, CV_32FC1);
159  if(_points2Mat==0)
160  {
161  yWarning("PF3DTracker::open - I wasn\'t able to allocate memory for _points2Mat.");
162  quit =true;
163  }
164  _tempMat=cvCreateMat(3, 2*nPixels, CV_32FC1);
165  if(_tempMat==0)
166  {
167  yWarning("PF3DTracker::open - I wasn\'t able to allocate memory for _tempMat.");
168  quit =true;
169  }
170 
171  //***********************************
172  //Read options from the command line.
173  //***********************************
174  string initializationFile = rf.check("from",Value("pf3dTracker.ini"),"Initialization file (string)").asString();
175  string context = rf.check("context",Value("pf3dTracker"),"Context (string)").asString();
176 
177  /*
178  //create and initialize the resource finder
179  ResourceFinder rf;
180  rf.setDefaultContext(context);
181  rf.setDefaultConfigFile(initializationFile);
182  rf.configure(0, NULL);
183  */
184 
185  // pass configuration over to bottle
186  Bottle botConfig(rf.toString());
187  //botConfig.setMonitor(config.getMonitor()); //is this needed?
188 
189 
190  _inputVideoPortName = botConfig.check("inputVideoPort",
191  Value("/pf3dTracker/video:i"),
192  "Input video port (string)").asString();
193  _inputVideoPort.open(_inputVideoPortName);
194 
195  _outputVideoPortName = botConfig.check("outputVideoPort",
196  Value("/pf3dTracker/video:o"),
197  "Output video port (string)").asString();
198  _outputVideoPort.open(_outputVideoPortName);
199 
200  _outputDataPortName = botConfig.check("outputDataPort",
201  Value("/pf3dTracker/data:o"),
202  "Output data port (string)").asString();
203  _outputDataPort.open(_outputDataPortName);
204 
205  _inputParticlePortName = botConfig.check("inputParticlePort",
206  Value("/pf3dTracker/particles:i"),
207  "Input particle port (string)").asString();
208  _inputParticlePort.open(_inputParticlePortName);
209 
210  _outputParticlePortName = botConfig.check("outputParticlePort",
211  Value("/pf3dTracker/particles:o"),
212  "Output particle port (string)").asString();
213  _outputParticlePort.open(_outputParticlePortName);
214 
215  _outputAttentionPortName = botConfig.check("outputAttentionPort",
216  Value("/pf3dTracker/attention:o"),
217  "Output attention port (string)").asString();
218  _outputAttentionPort.open(_outputAttentionPortName);
219 
220  _likelihoodThreshold = (float)botConfig.check("likelihoodThreshold",
221  Value(1.0),
222  "Likelihood threshold value (double)").asFloat64();
223 
224  _attentionOutputMax = botConfig.check("attentionOutputMax",
225  Value(257),
226  "attentionOutputMax (double)").asFloat64();
227  _attentionOutputDecrease = botConfig.check("attentionOutputDecrease",
228  Value(0.99),
229  "attentionOutputDecrease (double)").asFloat64();
230 
231  if (botConfig.check("outputUVDataPort"))
232  {
233  supplyUVdata = true;
234  _outputUVDataPortName = botConfig.check("outputUVDataPort",
235  Value("/PF3DTracker/dataUVOut"),
236  "Image plane output data port (string)").asString();
237  _outputUVDataPort.open(_outputUVDataPortName);
238  }
239  else
240  {
241  yWarning("No (u,v) data will be supplied");
242  supplyUVdata = false;
243  }
244 
245  _nParticles = botConfig.check("nParticles",
246  Value("1000"),
247  "Number of particles used in the tracker (int)").asInt32();
248 
249  _colorTransfPolicy = botConfig.check("colorTransfPolicy",
250  Value("1"),
251  "Color transformation policy (int)").asInt32();
252  if(_colorTransfPolicy!=0 && _colorTransfPolicy!=1)
253  {
254  yWarning() << "Color trasformation policy "<<_colorTransfPolicy<<" is not yet implemented.";
255  quit=true; //stop the execution, after checking all the parameters.
256  }
257 
258  _inside_outside_difference_weight = (float)botConfig.check("insideOutsideDiffWeight",
259  Value("1.5"),
260  "Inside-outside difference weight in the likelihood function (double)").asFloat64();
261 
262  _projectionModel = botConfig.check("projectionModel",
263  Value("perspective"),
264  "Projection model (string)").asString();
265 
266  if(_projectionModel=="perspective")
267  {
268  bool rfOk=false;
269  if (botConfig.check("cameraContext") && botConfig.check("cameraFile") && botConfig.check("cameraGroup"))
270  {
271  ResourceFinder camera_rf;
272  camera_rf.setDefaultContext(botConfig.find("cameraContext").asString().c_str());
273  camera_rf.setDefaultConfigFile(botConfig.find("cameraFile").asString().c_str());
274  camera_rf.configure(0,NULL);
275  Bottle &params=camera_rf.findGroup(botConfig.find("cameraGroup").asString());
276  if (!params.isNull())
277  {
278  _calibrationImageWidth =params.check("w",Value(320)).asInt32();
279  _calibrationImageHeight=params.check("h",Value(240)).asInt32();
280  _perspectiveFx =(float)params.check("fx",Value(257.34)).asFloat64();
281  _perspectiveFy =(float)params.check("fy",Value(257.34)).asFloat64();
282  _perspectiveCx =(float)params.check("cx",Value(160.0)).asFloat64();
283  _perspectiveCy =(float)params.check("cy",Value(120.0)).asFloat64();
284  rfOk=true;
285  }
286  }
287 
288  if (!rfOk)
289  {
290  _calibrationImageWidth =botConfig.check("w",Value(320)).asInt32();
291  _calibrationImageHeight=botConfig.check("h",Value(240)).asInt32();
292  _perspectiveFx =(float)botConfig.check("perspectiveFx",Value(257.34)).asFloat64();
293  _perspectiveFy =(float)botConfig.check("perspectiveFy",Value(257.34)).asFloat64();
294  _perspectiveCx =(float)botConfig.check("perspectiveCx",Value(160.0)).asFloat64();
295  _perspectiveCy =(float)botConfig.check("perspectiveCy",Value(120.0)).asFloat64();
296  }
297 
298  cout<<"w ="<<_calibrationImageWidth<<endl;
299  cout<<"h ="<<_calibrationImageHeight<<endl;
300  cout<<"fx="<<_perspectiveFx<<endl;
301  cout<<"fy="<<_perspectiveFy<<endl;
302  cout<<"cx="<<_perspectiveCx<<endl;
303  cout<<"cy="<<_perspectiveCy<<endl;
304  }
305  else
306  {
307  if(_projectionModel=="equidistance" || _projectionModel=="unified")
308  {
309  yWarning() <<"Projection model "<<_projectionModel<<" is not yet implemented.";
310  quit=true; //stop the execution, after checking all the parameters.
311  }
312  else
313  {
314  yWarning() <<"The projection model you specified ("<<_projectionModel<<") is not supported.";
315  quit=true; //stop the execution, after checking all the parameters.
316  }
317  }
318 
319  _initializationMethod = botConfig.check("initializationMethod",
320  Value("search"),
321  "Initialization method (string)").asString();
322 
323  if(_initializationMethod=="3dEstimate")
324  {
325  _initialX = botConfig.check("initialX",
326  Value("0"),
327  "Estimated initial X position [m] (double)").asFloat64()*1000; //meters to millimeters
328  _initialY = botConfig.check("initialY",
329  Value("0"),
330  "Estimated initial Y position [m] (double)").asFloat64()*1000; //meters to millimeters
331  _initialZ = botConfig.check("initialZ",
332  Value("1000"),
333  "Estimated initial Z position [m] (double)").asFloat64()*1000; //meters to millimeters
334  }
335  else
336  {
337  if(_initializationMethod=="2dEstimate" || _initializationMethod=="search")
338  {
339  yWarning() << "Initialization method "<<_initializationMethod<<" is not yet implemented.";
340  quit=true; //stop the execution, after checking all the parameters.
341  }
342  else
343  {
344  yWarning() << "The initialization method you specified ("<<_initializationMethod<<") is not supported.";
345  quit=true; //stop the execution, after checking all the parameters.
346  }
347  }
348 
349  _trackedObjectType = botConfig.check("trackedObjectType",
350  Value("sphere"),
351  "Tracked object type (string)").asString();
352  if(_trackedObjectType=="sphere")
353  {
354  ;//good, this one is implemented.
355  //0 means inner and outer circle, 1 means just one circle of the correct size.
356  _circleVisualizationMode = botConfig.check("circleVisualizationMode",
357  Value("0"),
358  "Visualization mode for the sphere (int)").asInt32();
359  }
360  else
361  {
362  if(_trackedObjectType=="parallelogram")
363  {
364  yWarning() << "Tracked object type "<<_trackedObjectType<<" is not yet implemented.";
365  quit=true; //stop the execution, after checking all the parameters.
366  }
367  else
368  {
369  yWarning() << "The tracked object type you specified ("<<_trackedObjectType<<") is not supported.";
370  quit=true; //stop the execution, after checking all the parameters.
371  }
372  }
373 
374  //*****************************************************
375  //Build and read the color model for the tracked object
376  //*****************************************************
377  //
378  trackedObjectColorTemplate = rf.findFile("trackedObjectColorTemplate");
379  dataFileName = rf.findFile("trackedObjectTemp");
380  //cout<<"$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"<<trackedObjectColorTemplate<<endl;
381 
382  failure=computeTemplateHistogram(trackedObjectColorTemplate,dataFileName);
383  if(failure)
384  {
385  yWarning("I had troubles computing the template histogram.");
386  quit=true;
387  }
388 
389  failure=readModelHistogram(_modelHistogramMat,dataFileName.c_str());
390  if(failure)
391  {
392  yWarning("I had troubles reading the template histogram.");
393  quit=true;
394  }
395 
396  //*******************************************
397  //Read the shape model for the tracked object
398  //*******************************************
399  trackedObjectShapeTemplate = rf.findFile("trackedObjectShapeTemplate");
400  failure=readInitialmodel3dPoints(_model3dPointsMat,trackedObjectShapeTemplate);
401  if(failure)
402  {
403  yWarning("I had troubles reading the model 3D points.");
404  quit=true;
405  }
406 
407  if((_trackedObjectType=="sphere") && (_circleVisualizationMode==1))
408  {
409  //create _visualization3dPointsMat and fill it with the average between outer and inner 3D points.
410  _visualization3dPointsMat=cvCreateMat( 3, 2*nPixels, CV_32FC1 );
411  //only the first half of this matrix is used. the second part can be full of rubbish (not zeros, I guess).
412  cvSet(_visualization3dPointsMat,(cvScalar(1)));
413  for(row=0;row<3;row++)
414  {
415  for(column=0;column<nPixels;column++)
416  {
417  ((float*)(_visualization3dPointsMat->data.ptr + _visualization3dPointsMat->step*row))[column]=(((float*)(_model3dPointsMat->data.ptr + _model3dPointsMat->step*row))[column]+((float*)(_model3dPointsMat->data.ptr + _model3dPointsMat->step*row))[column+nPixels])/2;
418  }
419  }
420  }
421 
422  //***************************************************
423  //Read the motion model matrix for the tracked object
424  //***************************************************
425 
426  _accelStDev = (float)botConfig.check("accelStDev",
427  Value(150.0),
428  "StDev of acceleration noise (double)").asFloat64();
429 
430  motionModelMatrix = rf.findFile("motionModelMatrix");
431 
432  //allocate space for the _A matrix. 32bit floats, one channel.
433  _A=cvCreateMat(7,7,CV_32FC1);
434  failure=readMotionModelMatrix(_A, motionModelMatrix);
435  if(failure)
436  {
437  yWarning("I had troubles reading the motion model matrix.");
438  quit=true;
439  }
440 
441  //allocate memory for the particles;
442  _particles=cvCreateMat(7,_nParticles,CV_32FC1);
443  //fill the memory with zeros, so that valgrind won't complain.
444  cvSetZero(_particles);
445 
446  //define ways of accessing the particles:
447  _particles1 = cvCreateMatHeader( 1,_nParticles, CV_32FC1);
448  cvInitMatHeader( _particles1, 1, _nParticles, CV_32FC1, _particles->data.ptr, _particles->step );
449  _particles2 = cvCreateMatHeader( 1,_nParticles, CV_32FC1);
450  cvInitMatHeader( _particles2, 1, _nParticles, CV_32FC1, _particles->data.ptr + _particles->step*1, _particles->step );
451  _particles3 = cvCreateMatHeader( 1,_nParticles, CV_32FC1);
452  cvInitMatHeader( _particles3, 1, _nParticles, CV_32FC1, _particles->data.ptr + _particles->step*2, _particles->step );
453  _particles4 = cvCreateMatHeader( 1,_nParticles, CV_32FC1);
454  cvInitMatHeader( _particles4, 1, _nParticles, CV_32FC1, _particles->data.ptr + _particles->step*3, _particles->step );
455  _particles5 = cvCreateMatHeader( 1,_nParticles, CV_32FC1);
456  cvInitMatHeader( _particles5, 1, _nParticles, CV_32FC1, _particles->data.ptr + _particles->step*4, _particles->step );
457  _particles6 = cvCreateMatHeader( 1,_nParticles, CV_32FC1);
458  cvInitMatHeader( _particles6, 1, _nParticles, CV_32FC1, _particles->data.ptr + _particles->step*5, _particles->step );
459  _particles7 = cvCreateMatHeader( 1,_nParticles, CV_32FC1);
460  cvInitMatHeader( _particles7, 1, _nParticles, CV_32FC1, _particles->data.ptr + _particles->step*6, _particles->step );
461  _particles1to6 = cvCreateMatHeader( 6,_nParticles, CV_32FC1);
462  cvInitMatHeader( _particles1to6, 6, _nParticles, CV_32FC1, _particles->data.ptr, _particles->step );
463 
464  //allocate memory for the "new" particles;
465  _newParticles=cvCreateMat(7,_nParticles,CV_32FC1);
466  _newParticles1to6 = cvCreateMatHeader( 6,_nParticles, CV_32FC1);
467  cvInitMatHeader( _newParticles1to6, 6, _nParticles, CV_32FC1, _newParticles->data.ptr, _newParticles->step );
468 
469  //allocate memory for "noise"
470  _noise=cvCreateMat(6,_nParticles,CV_32FC1);
471  cvSetZero(_noise);
472  _noise1 = cvCreateMatHeader( 3,_nParticles, CV_32FC1);
473  cvInitMatHeader( _noise1, 3, _nParticles, CV_32FC1, _noise->data.ptr, _noise->step );
474  cvSetZero(_noise1);
475  _noise2 = cvCreateMatHeader( 3,_nParticles, CV_32FC1);
476  cvInitMatHeader( _noise2, 3, _nParticles, CV_32FC1, _noise->data.ptr + _noise->step*3, _noise->step );
477  cvSetZero(_noise2);
478 
479  //resampling-related stuff.
480  _nChildren = cvCreateMat(1,_nParticles,CV_32FC1);
481  _label = cvCreateMat(1,_nParticles,CV_32FC1);
482  _ramp = cvCreateMat(1,_nParticles,CV_32FC1);
483  _u = cvCreateMat(1,_nParticles,CV_32FC1);
484 
485  _cumWeight =cvCreateMat(1,_nParticles+1,CV_32FC1);
486 
487  int count;
488  for(count=0;count<_nParticles;count++)
489  {
490  ((float*)(_ramp->data.ptr))[count]=(float)count+1.0F;
491  }
492 
493  temp = botConfig.check("saveImagesWithOpencv",
494  Value("false"),
495  "Save elaborated images with OpenCV? (string)").asString();
496  if(temp=="true")
497  {
498  _saveImagesWithOpencv=true;
499  }
500 
501  if(_saveImagesWithOpencv)
502  {
503  _saveImagesWithOpencvDir = botConfig.check("saveImagesWithOpencvDir",
504  Value(""),
505  "Directory where to save the elaborated images (string)").asString();
506  }
507 
508  if(_initializationMethod=="3dEstimate")
509  {
510  //cout<<"Initialization method = 3dEstimate."<<endl;
511  //*************************************************************************
512  //generate a set of random particles near the estimated initial 3D position
513  //*************************************************************************
514 
515  float mean,velocityStDev;
516  velocityStDev=0; //warning ??? !!! I'm setting parameters for the dynamic model here.
517 
518  //initialize X
519  mean=(float)_initialX;
520  cvRandArr( &rngState, _particles1, CV_RAND_NORMAL, cvScalar(mean), cvScalar(_accelStDev));
521  //initialize Y
522  mean=(float)_initialY;
523  cvRandArr( &rngState, _particles2, CV_RAND_NORMAL, cvScalar(mean), cvScalar(_accelStDev));
524  //initialize Z
525  mean=(float)_initialZ;
526  cvRandArr( &rngState, _particles3, CV_RAND_NORMAL, cvScalar(mean), cvScalar(_accelStDev));
527  //initialize VX
528  mean=0;
529  cvRandArr( &rngState, _particles4, CV_RAND_NORMAL, cvScalar(mean), cvScalar(velocityStDev));
530  //initialize VY
531  cvRandArr( &rngState, _particles5, CV_RAND_NORMAL, cvScalar(mean), cvScalar(velocityStDev));
532  //initialize VZ
533  cvRandArr( &rngState, _particles6, CV_RAND_NORMAL, cvScalar(mean), cvScalar(velocityStDev));
534  }
535 
536  downsampler=0; //this thing is used to send less data to the plotter
537 
538  //Matrices-related stuff.
539  //connect headers to data, allocate space...
540  _rzMat = cvCreateMat(3, 3, CV_32FC1);
541  _ryMat = cvCreateMat(3, 3, CV_32FC1);
542  _uv = cvCreateMat(2,2*nPixels, CV_32FC1);
543 
544  _tempMat1 = cvCreateMatHeader( 1,2*nPixels, CV_32FC1);
545  cvInitMatHeader( _tempMat1, 1, 2*nPixels, CV_32FC1,_tempMat->data.ptr, _tempMat->step );
546 
547  _tempMat2 = cvCreateMatHeader( 1,2*nPixels, CV_32FC1);
548  cvInitMatHeader( _tempMat2, 1, 2*nPixels, CV_32FC1, _tempMat->data.ptr+_tempMat->step*1, _tempMat->step ); //FUNZIONA? ??? !!!
549 
550  _tempMat3 = cvCreateMatHeader( 1,2*nPixels, CV_32FC1);
551  cvInitMatHeader( _tempMat3, 1, 2*nPixels, CV_32FC1, _tempMat->data.ptr+_tempMat->step*2, _tempMat->step ); //FUNZIONA? ??? !!!
552 
553  _p2Mat1 = cvCreateMatHeader( 1,2*nPixels, CV_32FC1);
554  cvInitMatHeader( _p2Mat1, 1, 2*nPixels, CV_32FC1, _points2Mat->data.ptr ); //FUNZIONA? ??? !!!
555  _p2Mat3 = cvCreateMatHeader( 1,2*nPixels, CV_32FC1);
556  cvInitMatHeader( _p2Mat3, 1, 2*nPixels, CV_32FC1, _points2Mat->data.ptr+_points2Mat->step*2, _points2Mat->step ); //FUNZIONA? ??? !!!
557 
558  _drawingMat=cvCreateMat(3, 2*nPixels, CV_32FC1);
559  _projectionMat=cvCreateMat(2, 3, CV_32FC1);
560 
561  _xyzMat1 = cvCreateMatHeader(1,2*nPixels,CV_32FC1);
562  _xyzMat2 = cvCreateMatHeader(1,2*nPixels,CV_32FC1);
563  _xyzMat3 = cvCreateMatHeader(1,2*nPixels,CV_32FC1);
564 
565  //testOpenCv(); //Used to test stuff.
566 
567  //**********************************
568  //Write the header line for the data
569  //**********************************
570  //cout<<"\033[37;1m"<<" frame#";
571  //cout<<"\033[32;1m"<<" meanX";
572  //cout<<" meanY";
573  //cout<<" meanZ";
574  //cout<<"\033[37;1m Likelihood";
575  //cout<<" Seing";
576  //cout<<"\033[33;1m meanU";
577  //cout<<" meanV";
578  //cout<<"\033[37;1m fps"<<"\033[0m"<<endl;
579 
580  cout<<" frame#";
581  cout<<" meanX";
582  cout<<" meanY";
583  cout<<" meanZ";
584  cout<<" Likelihood";
585  cout<<" Seing";
586  cout<<" meanU";
587  cout<<" meanV";
588  cout<<" fps"<<endl;
589 
590  //*********************************************************************
591  //Read one image from the stream.
592  //*********************************************************************
593 
594  _yarpImage = _inputVideoPort.read();
595  _inputVideoPort.getEnvelope(_yarpTimestamp);
596 
597  if (_yarpImage != NULL)
598  {
599  widthRatio=(double)_yarpImage->width()/(double)_calibrationImageWidth;
600  heightRatio=(double)_yarpImage->height()/(double)_calibrationImageHeight;
601  _perspectiveFx=_perspectiveFx*(float)widthRatio;
602  _perspectiveFy=_perspectiveFy*(float)heightRatio;
603  _perspectiveCx=_perspectiveCx*(float)widthRatio;
604  _perspectiveCy=_perspectiveCy*(float)heightRatio;
605 
606  _rawImage = cvCreateImage(cvSize(_yarpImage->width(),_yarpImage->height()),IPL_DEPTH_8U, 3); //This allocates space for the image.
607  _transformedImage = cvCreateImage(cvSize(_yarpImage->width(),_yarpImage->height()),IPL_DEPTH_8U, 3); //This allocates space for the image.
608  toCvMat(*_yarpImage).copyTo(cv::cvarrToMat(_rawImage));
609 
610  rgbToYuvBinImageLut(_rawImage,_transformedImage,_lut);
611 
612  //allocate space for the transformed image.
613  _transformedImage = cvCreateImage(cvSize(_yarpImage->width(),_yarpImage->height()),IPL_DEPTH_8U,3);
614 
615  _framesNotTracking=0;
616  _frameCounter=1;
617  _attentionOutput=0;
618 
619  _lastU=_yarpImage->width()/2.0F;
620  _lastV=_yarpImage->height()/2.0F;
621 
622  _initialTime=0;
623  _finalTime=0;
624  _firstFrame=true;
625  }
626 
627  if(quit==true)
628  {
629  yWarning("There were problems initializing the object: the execution was interrupted.");
630  return false; //there were problems initializing the objet: stop the execution.
631  }
632  else
633  {
634  _doneInitializing=true;
635  return true; //the object was set up successfully.
636  }
637 }
638 
639 //member that closes the object.
640 bool PF3DTracker::close()
641 {
642  _inputVideoPort.close();
643  _outputVideoPort.close();
644  _outputDataPort.close();
645  _outputUVDataPort.close();
646  _inputParticlePort.close();
647  _outputParticlePort.close();
648  _outputAttentionPort.close();
649 
650  if (_A != NULL)
651  cvReleaseMat(&_A);
652 
653  if (_particles != NULL)
654  cvReleaseMat(&_particles);
655 
656  if (_newParticles != NULL)
657  cvReleaseMat(&_newParticles);
658 
659  return true;
660 }
661 
662 //member that closes the object.
663 bool PF3DTracker::interruptModule()
664 {
665  _inputVideoPort.interrupt();
666  _outputVideoPort.interrupt();
667  _outputDataPort.interrupt();
668  _outputUVDataPort.interrupt();
669  _inputParticlePort.interrupt();
670  _outputParticlePort.interrupt();
671  _outputAttentionPort.interrupt();
672 
673  return true;
674 }
675 
676 //member that is repeatedly called by YARP, to give this object the chance to do something.
677 //should this function return "false", the object would be terminated.
678 //I already have one image, when I get here (I either acquire it in the initialization method or in the end of this same method).
679 bool PF3DTracker::updateModule()
680 {
681  if(_doneInitializing)
682  {
683  int count;
684  unsigned int seed;
685  float likelihood, mean, maxX, maxY, maxZ;
686  float weightedMeanX, weightedMeanY, weightedMeanZ;
687  float meanU;
688  float meanV;
689  float wholeCycle;
690  string outputFileName;
691  stringstream out;
692 
693  seed=rand();
694 
695  _finalTime=yarp::os::Time::now();
696  wholeCycle=(float)(_finalTime-_initialTime);
697  _initialTime=yarp::os::Time::now();
698 
699  //*****************************************
700  //calculate the likelihood of each particle
701  //*****************************************
702  float sumLikelihood=0.0;
703  float maxLikelihood=0.0;
704  int maxIndex=-1;
705  for(count=0;count< _nParticles;count++)
706  {
707  if(_colorTransfPolicy==0)
708  {
709  evaluateHypothesisPerspective(_model3dPointsMat,(float)cvmGet(_particles,0,count),(float)cvmGet(_particles,1,count),(float)cvmGet(_particles,2,count),_modelHistogramMat,_transformedImage,_perspectiveFx,_perspectiveFy, _perspectiveCx,_perspectiveCy,_inside_outside_difference_weight,likelihood);
710  }
711  else
712  {
713  if(_colorTransfPolicy==1)
714  {
715  evaluateHypothesisPerspectiveFromRgbImage(_model3dPointsMat,(float)cvmGet(_particles,0,count),(float)cvmGet(_particles,1,count),(float)cvmGet(_particles,2,count),_modelHistogramMat,_rawImage,_perspectiveFx,_perspectiveFy, _perspectiveCx,_perspectiveCy,_inside_outside_difference_weight,likelihood);
716  }
717  else
718  {
719  yWarning() << "Wrong ID for color transformation policy:"<<_colorTransfPolicy<<". Quitting.";
720  return false;
721  }
722  }
723 
724  cvmSet(_particles,6,count,likelihood);
725  sumLikelihood+=likelihood;
726  if(likelihood>maxLikelihood)
727  {
728  maxLikelihood=likelihood;
729  maxIndex=count;
730  }
731  }
732 
733 
734  if(maxIndex!=-1)
735  {
736  maxX=(float)cvmGet(_particles,0,maxIndex);
737  maxY=(float)cvmGet(_particles,1,maxIndex);
738  maxZ=(float)cvmGet(_particles,2,maxIndex);
739  }
740  else
741  {
742  maxX=1;
743  maxY=1;
744  maxZ=1000;
745  }
746 
747  if(maxLikelihood/exp((float)20.0)>_likelihoodThreshold) //normalizing likelihood
748  {
749  _seeingObject=1;
750  _framesNotTracking=0;
751  _attentionOutput=_attentionOutputMax;
752  }
753  else
754  {
755  _attentionOutput=_attentionOutput*_attentionOutputDecrease;
756  _seeingObject=0;
757  _framesNotTracking+=1;
758  }
759 
760  //******************************************************
761  // //send data to the plotter. COMMENTED FOR EFFICENCY
762  //******************************************************
763  // if(downsampler % 3 ==0)
764  // {
765  // //these values might be a mix of the ones before and after the resampling took place. ??? WARNING
766  // Bottle& particleOutput=_outputParticlePort.prepare();
767  // particleOutput.clear();
768  // for(count=0;count<_nParticles;count++)
769  // {
770  // particleOutput.addFloat64((double)(_particles[0][count]));
771  // particleOutput.addFloat64((double)(_particles[1][count]));
772  // particleOutput.addFloat64((double)(_particles[2][count]));
773  // particleOutput.addFloat64((double)(_particles[3][count]));
774  // particleOutput.addFloat64((double)(_particles[4][count]));
775  // particleOutput.addFloat64((double)(_particles[5][count]));
776  // particleOutput.addFloat64((double)(_particles[6][count]));
777  // }
778  // _outputParticlePort.write();
779  // }
780  // downsampler+=1;
781 
782  //If the likelihood has been under the threshold for 5 frames, reinitialize the tracker.
783  //This just works for the sphere.
784  if(_framesNotTracking==5 || sumLikelihood==0.0)
785  {
786  cout<<"**********************************************************************Reset\n";
787  float mean,velocityStDev;
788  velocityStDev=0; //warning ??? !!! I'm setting parameters for the dynamic model here.
789 
790  mean=(float)_initialX;
791  cvRandArr( &rngState, _particles1, CV_RAND_NORMAL, cvScalar(mean), cvScalar(_accelStDev));
792  //initialize Y
793  mean=(float)_initialY;
794  cvRandArr( &rngState, _particles2, CV_RAND_NORMAL, cvScalar(mean), cvScalar(_accelStDev));
795  //initialize Z
796  mean=(float)_initialZ;
797  cvRandArr( &rngState, _particles3, CV_RAND_NORMAL, cvScalar(mean), cvScalar(_accelStDev));
798  //initialize VX
799  mean=0;
800  cvRandArr( &rngState, _particles4, CV_RAND_NORMAL, cvScalar(mean), cvScalar(velocityStDev));
801  //initialize VY
802  cvRandArr( &rngState, _particles5, CV_RAND_NORMAL, cvScalar(mean), cvScalar(velocityStDev));
803  //initialize VZ
804  cvRandArr( &rngState, _particles6, CV_RAND_NORMAL, cvScalar(mean), cvScalar(velocityStDev));
805 
806  _framesNotTracking=0;
807 
808  weightedMeanX=weightedMeanY=weightedMeanZ=0.0; // UGO: they should be zeroed before accumulation
809  for(count=0;count<_nParticles;count++)
810  {
811  weightedMeanX+=(float)cvmGet(_particles,0,count);
812  weightedMeanY+=(float)cvmGet(_particles,1,count);
813  weightedMeanZ+=(float)cvmGet(_particles,2,count);
814  }
815  weightedMeanX/=_nParticles;
816  weightedMeanY/=_nParticles;
817  weightedMeanZ/=_nParticles;
818  //this mean is not weighted as there is no weight to use: the particles have just been generated.
819 
820  //*****************************************
821  //WRITE ESTIMATES TO THE SCREEN, FIRST PART
822  //*****************************************
823  //these are not really estimates, but...
824  cout<<setw(8)<<_frameCounter;
825  cout<<setiosflags(ios::fixed)<<setprecision(3)<<setw(8)<<weightedMeanX/1000; //millimeters to meters
826  cout<<" "<<setw(8)<<weightedMeanY/1000; //millimeters to meters
827  cout<<" "<<setiosflags(ios::fixed)<<setprecision(3)<<setw(8)<<weightedMeanZ/1000; //millimeters to meters
828  cout<<" "<<setiosflags(ios::fixed)<<setprecision(5)<<setw(8)<<maxLikelihood/exp((float)20.0); //normalizing likelihood
829  cout<<" "<<setw(5)<<_seeingObject;
830  }
831  else
832  {
833  //*********************************************
834  //Compute the mean and normalize the likelihood
835  //*********************************************
836  weightedMeanX=0.0;
837  weightedMeanY=0.0;
838  weightedMeanZ=0.0;
839  for(count=0;count<_nParticles;count++)
840  {
841  cvmSet(_particles,6,count,(cvmGet(_particles,6,count)/sumLikelihood));
842  weightedMeanX+=(float)(cvmGet(_particles,0,count)*cvmGet(_particles,6,count));
843  weightedMeanY+=(float)(cvmGet(_particles,1,count)*cvmGet(_particles,6,count));
844  weightedMeanZ+=(float)(cvmGet(_particles,2,count)*cvmGet(_particles,6,count));
845  }
846 
847  //*****************************************
848  //WRITE ESTIMATES TO THE SCREEN, FIRST PART
849  //*****************************************
850  //cout<<"\033[37;1m"<<setw(8)<<_frameCounter;
851  //cout<<" \033[32;1m"<<setiosflags(ios::fixed)<<setprecision(3)<<setw(8)<<weightedMeanX/1000; //millimeters to meters
852  //cout<<" "<<setw(8)<<weightedMeanY/1000; //millimeters to meters
853  //cout<<" "<<setiosflags(ios::fixed)<<setprecision(3)<<setw(8)<<weightedMeanZ/1000; //millimeters to meters
854  //cout<<" \033[37;1m"<<setiosflags(ios::fixed)<<setprecision(5)<<setw(8)<<maxLikelihood/exp((float)20.0); //normalizing likelihood
855  //cout<<" "<<setw(5)<<_seeingObject;
856 
857  cout<<setw(8)<<_frameCounter;
858  cout<<setiosflags(ios::fixed)<<setprecision(3)<<setw(8)<<weightedMeanX/1000; //millimeters to meters
859  cout<<" "<<setw(8)<<weightedMeanY/1000; //millimeters to meters
860  cout<<" "<<setiosflags(ios::fixed)<<setprecision(3)<<setw(8)<<weightedMeanZ/1000; //millimeters to meters
861  cout<<" "<<setiosflags(ios::fixed)<<setprecision(5)<<setw(8)<<maxLikelihood/exp((float)20.0); //normalizing likelihood
862  cout<<" "<<setw(5)<<_seeingObject;
863 
864  //------------------------------------------------------------martim
865  Bottle *particleInput = _inputParticlePort.read(false);
866  if (particleInput==NULL)
867  _numParticlesReceived=0;
868  else
869  _numParticlesReceived=(particleInput->get(0)).asInt32();
870  if(_numParticlesReceived > _nParticles)
871  {
872  _numParticlesReceived=0;
873  yWarning("PROBLEM: Input particles are more than nParticles.");
874  }
875  //------------------------------------------------------------end martim
876 
877  //**********************
878  //RESAMPLE THE PARTICLES
879  //**********************
880  int minimum_likelihood=10; //do not resample if maximum likelihood is lower than this.
881  //this is intended to prevent that the particles collapse on the origin when you start the tracker.
882  if(maxLikelihood>minimum_likelihood)
883  {
        //TODO: I think this still doesn't work: the resampled particles contain not-a-number values.
885  //systematicR(_particles1to6,_particles7,_newParticles); //SOMETHING'S WRONG HERE: sometimes the new particles look like being messed up ??? !!!
886  systematic_resampling(_particles1to6,_particles7,_newParticles,_cumWeight);
887  }
888  else //I can't apply a resampling with all weights equal to 0!
889  {
890  //TODO:CHECK that copying the whole thing creates no problems.
891  //I think I used to copy only 6 lines to make it faster.
892  //ippsCopy_32f(&_particles[0][0], &_newParticles[0][0], 6*_nParticles);
893  cvCopy(_particles,_newParticles);
894  }
895 
896  //the "good" particles now are in _newParticles
897  //******************************************
898  //APPLY THE MOTION MODEL: 1.APPLY THE MATRIX
899  //******************************************
900  cvMatMul(_A,_newParticles,_particles);
901 
902  //the "good" particles now are in _particles
903  //********************************************************
904  //APPLY THE MOTION MODEL: 2.ADD THE EFFECT OF ACCELERATION
905  //********************************************************
906  mean = 0; //NEW
907  //cout<<"Noise generation parameters: mean= "<<mean<<", accelStDev= "<<_accelStDev<<endl;
908  //cout<<"_noise1 before generation: "<<((float*)(_noise1->data.ptr + _noise->step*0))[0]<<endl;
909  cvRandArr( &rngState, _noise1, CV_RAND_NORMAL, cvScalar(mean), cvScalar(_accelStDev));
910  //cout<<"_noise1 after generation: "<<((float*)(_noise1->data.ptr + _noise->step*0))[0]<<endl;
911 
912  cvCopy(_noise1,_noise2);
913  cvConvertScale( _noise1, _noise1, 0.5, 0 );//influence on the position is half that on speed.
914  //cout<<"_noise1 after rescaling: "<<((float*)(_noise1->data.ptr + _noise->step*0))[0]<<endl;
915 
916  //cout<<"_noise2 after generation: "<<((float*)(_noise2->data.ptr + _noise->step*0))[0]<<endl;
917 
918  //cout<<"First element of _particles before addition of noise: "<<((float*)(_particles->data.ptr + _particles->step*0))[0]<<endl;
919  cvAdd(_particles1to6,_noise,_particles1to6);//sum the influence of the noise to the previous status
920  //cout<<"First element of _particles after addition of noise: "<<((float*)(_particles->data.ptr + _particles->step*0))[0]<<endl;
921 
922  //------------------------------------------------------------martim
923  // get particles from input
924  if(_numParticlesReceived > 0){
925  int topdownParticles = _nParticles - _numParticlesReceived;
926  for(count=0 ; count<_numParticlesReceived ; count++){
927  cvmSet(_particles,0,topdownParticles+count, (particleInput->get(1+count*3+0)).asFloat64());
928  cvmSet(_particles,1,topdownParticles+count, (particleInput->get(1+count*3+1)).asFloat64());
929  cvmSet(_particles,2,topdownParticles+count, (particleInput->get(1+count*3+2)).asFloat64());
930  cvmSet(_particles,3,topdownParticles+count, 0);
931  cvmSet(_particles,4,topdownParticles+count, 0);
932  cvmSet(_particles,5,topdownParticles+count, 0);
933  cvmSet(_particles,6,topdownParticles+count, 0.8); //??
934  }
935  //num_bottomup_objects=(particleInput->get(1+count*3)).asInt32();
936  }
937  //------------------------------------------------------------end martim
938  }
939 
940  //************************************
941  //DRAW THE SAMPLED POINTS ON THE IMAGE
942  //************************************
943 
944  if(_circleVisualizationMode==0)
945  {
946  drawSampledLinesPerspectiveYARP(_model3dPointsMat, weightedMeanX,weightedMeanY,weightedMeanZ, _yarpImage,_perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, 255, 255, 255, meanU, meanV);
947  }
948  if(_circleVisualizationMode==1)
949  {
950  if(_seeingObject)
951  drawContourPerspectiveYARP(_visualization3dPointsMat, weightedMeanX,weightedMeanY,weightedMeanZ, _yarpImage,_perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, 0, 255, 0, meanU, meanV);
952  else
953  drawContourPerspectiveYARP(_visualization3dPointsMat, weightedMeanX,weightedMeanY,weightedMeanZ, _yarpImage,_perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, 255,255, 0, meanU, meanV);
954  }
955 
956  //******************************************
957  //WRITE ESTIMATES TO THE SCREEN, SECOND PART
958  //******************************************
959  //cout<<"\033[33;1m"<<setw(8)<<(int)meanU;
960  //cout<<setw(5)<<(int)meanV<<"\033[0m";
961  //if(_firstFrame==false)
962  //{
963  // cout<<setw(5)<<"\033[37;1m"<<setw(8)<<setiosflags(ios::fixed)<<setprecision(3)<<wholeCycle<<"\033[0m"<<endl;
964  //}
965  //else
966  //{
967  // cout<<"\033[37;1m -----\033[0m"<<endl;
968  // _firstFrame=false;
969  //}
970 
971  cout<<setw(8)<<(int)meanU;
972  cout<<setw(5)<<(int)meanV;
973  if(_firstFrame==false)
974  {
975  cout<<setw(5)<<setw(8)<<setiosflags(ios::fixed)<<setprecision(3)<<wholeCycle<<endl;
976  }
977  else
978  {
979  cout<<" -----"<<endl;
980  _firstFrame=false;
981  }
982 
983  Bottle& output=_outputDataPort.prepare();
984  output.clear();
985  output.addFloat64(weightedMeanX/1000);//millimeters to meters
986  output.addFloat64(weightedMeanY/1000);//millimeters to meters
987  output.addFloat64(weightedMeanZ/1000);//millimeters to meters
988  output.addFloat64(maxLikelihood/exp((float)20.0));//normalizing likelihood
989  output.addFloat64(meanU);
990  output.addFloat64(meanV);
991  output.addFloat64(_seeingObject);
992 
993  //set the envelope for the output port
994  _outputDataPort.setEnvelope(_yarpTimestamp);
995 
996  _outputDataPort.write();
997 
998  if (_seeingObject && supplyUVdata)
999  {
1000  Bottle& outputUV=_outputUVDataPort.prepare();
1001 
1002  outputUV.clear();
1003  outputUV.addFloat64(meanU);
1004  outputUV.addFloat64(meanV);
1005 
1006  //set the envelope for the output port
1007  _outputUVDataPort.setEnvelope(_yarpTimestamp);
1008  _outputUVDataPort.write();
1009  }
1010 
1011  Vector& tempVector=_outputAttentionPort.prepare();
1012  tempVector.resize(5);
1013  if(maxLikelihood>_likelihoodThreshold)
1014  {
1015  tempVector(0) = (meanU-_rawImage->width/2)/(_rawImage->width/2)/1.5; //X=(U-width/2) / width
1016  tempVector(1) = (meanV-_rawImage->height/2)/(_rawImage->height/2)/1.5;//Y= -(V-height/2) / height
1017  //_lastU=meanU; I just keep it to the center of the image, for now.
1018  //_lastV=meanV;I just keep it to the center of the image, for now.
1019  }
1020  else //keep sending the old value, when likelihood is under threshold
1021  {
1022  tempVector(0) = ((_lastU-_rawImage->width/2)/(_rawImage->width/2)); //X=(U-width/2) / width
1023  tempVector(1) = ((_lastV-_rawImage->height/2)/(_rawImage->height/2));//Y= -(V-height/2) / height
1024  }
1025 
1026  tempVector(2) = 0;
1027  tempVector(3) = 0;
1028  tempVector(4) = _attentionOutput;
1029 
1030  //set the envelope for the output port
1031  _outputAttentionPort.setEnvelope(_yarpTimestamp);
1032  _outputAttentionPort.write();
1033 
1034  //************************************
1035  //Write the elaborated image to a file
1036  //************************************
1037  //I should use the output video port, instead.
1038  //write image to file, openCV.
1039  if(_saveImagesWithOpencv)
1040  {
1041  if(_frameCounter<1000) out << 0;
1042  if(_frameCounter<100) out << 0;
1043  if(_frameCounter<10) out << 0;
1044  out << _frameCounter;
1045  outputFileName=_saveImagesWithOpencvDir+out.str()+".jpeg";
1046  toCvMat(*_yarpImage).copyTo(cv::cvarrToMat(_rawImage));
1047  imwrite(outputFileName, cv::cvarrToMat(_rawImage));
1048  }
1049 
1050  //write the elaborated image on the output port.
1051  cv::Mat tmpMat=toCvMat(*_yarpImage);
1052  cvtColor(tmpMat,tmpMat,CV_BGR2RGB);
1053  _outputVideoPort.prepare() = fromCvMat<PixelRgb>(tmpMat);
1054 
1055  //set the envelope for the output port
1056  _outputVideoPort.setEnvelope(_yarpTimestamp);
1057  _outputVideoPort.write();
1058 
1059  _frameCounter++;
1060 
1061  //*******************
1062  //acquire a new image
1063  //*******************
1064  _yarpImage = _inputVideoPort.read(); //read one image from the buffer.
1065  _inputVideoPort.getEnvelope(_yarpTimestamp);
1066 
1067  toCvMat(*_yarpImage).copyTo(cv::cvarrToMat(_rawImage));
1068 
1069  //*************************************
1070  //transform the image in the YUV format
1071  //*************************************
1072  if(_colorTransfPolicy==0)
1073  {
1074  rgbToYuvBinImageLut(_rawImage,_transformedImage,_lut);
1075  }
1076  // else do nothing
1077  }
1078  //if initialization has not finished, do nothing.
1079 
1080  return true; //continue: in this case it means everything is fine.
1081 }
1082 
1083 double PF3DTracker::getPeriod()
1084 {
1085  return 0.0; // sync with incoming data
1086 }
1087 
1088 void PF3DTracker::drawSampledLinesPerspectiveYARP(CvMat* model3dPointsMat, float x, float y, float z, ImageOf<PixelRgb> *image,float _perspectiveFx,float _perspectiveFy ,float _perspectiveCx,float _perspectiveCy ,int R, int G, int B, float &meanU, float &meanV)
1089 {
1090 
1091  bool failure;
1092  //CvMat* uv=cvCreateMat(2,2*nPixels,CV_32FC1);
1093 
1094  //create a copy of the 3D original points.
1095  cvCopy(model3dPointsMat,_drawingMat);
1096 
1097  //****************************
1098  //ROTOTRANSLATE THE 3D POINTS.
1099  //****************************
1100  failure=place3dPointsPerspective(_drawingMat,x,y,z);
1101  //cout<<"rototraslated points:\n";
1102  //printMatrix(&model3dPointsDuplicate[0][0],2*nPixels,3);
1103 
1104  //***********************
1105  //PROJECT 3D POINTS TO 2D
1106  //***********************
1107  failure= perspective_projection(_drawingMat, _perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, _uv)!=0;
1108  if(failure)
1109  {
1110  yWarning("I had troubles projecting the points.");
1111  }
1112 
1113  //DRAW
1114  int conta,uPosition,vPosition;
1115  meanU=0;
1116  meanV=0;
1117  for(conta=0;conta<nPixels;conta++)
1118  {
1119  meanU=meanU+((float*)(_uv->data.ptr + _uv->step*0))[conta];
1120  meanV=meanV+((float*)(_uv->data.ptr + _uv->step*1))[conta];
1121 
1122  vPosition= (int)((float*)(_uv->data.ptr + _uv->step*1))[conta];
1123  uPosition= (int)((float*)(_uv->data.ptr + _uv->step*0))[conta];
1124  if((uPosition<_rawImage->width)&&(uPosition>=0)&&(vPosition<_rawImage->height)&&(vPosition>=0))
1125  {
1126  image->pixel(uPosition,vPosition)= PixelRgb(B,G,R);
1127  }
1128  vPosition= (int)((float*)(_uv->data.ptr + _uv->step*1))[conta+nPixels];
1129  uPosition= (int)((float*)(_uv->data.ptr + _uv->step*0))[conta+nPixels];
1130  if((uPosition<_rawImage->width)&&(uPosition>=0)&&(vPosition<_rawImage->height)&&(vPosition>=0))
1131  {
1132  image->pixel(uPosition,vPosition)= PixelRgb(B,G,R);
1133  }
1134 
1135  }
1136 
1137  meanU=floor(meanU/nPixels);
1138  meanV=floor(meanV/nPixels);
1139  if((meanU<_rawImage->width)&&(meanU>=0)&&(meanV<_rawImage->height)&&(meanV>=0))
1140  {
1141  image->pixel((int)meanU,(int)meanV)= PixelRgb(B,G,R);
1142  }
1143 
1144 }
1145 
1146 void PF3DTracker::drawContourPerspectiveYARP(CvMat* model3dPointsMat,float x, float y, float z, ImageOf<PixelRgb> *image,float _perspectiveFx,float _perspectiveFy ,float _perspectiveCx,float _perspectiveCy ,int R, int G, int B, float &meanU, float &meanV)
1147 {
1148 
1149  bool failure;
1150  //CvMat* uv=cvCreateMat(2,2*nPixels,CV_32FC1);
1151 
1152  //create a copy of the 3D original points.
1153  cvCopy(model3dPointsMat,_drawingMat);
1154 
1155  //****************************
1156  //ROTOTRANSLATE THE 3D POINTS.
1157  //****************************
1158  failure=place3dPointsPerspective(_drawingMat,x,y,z);
1159  //cout<<"rototraslated points:\n";
1160  //printMatrix(&model3dPointsDuplicate[0][0],2*nPixels,3);
1161 
1162  //***********************
1163  //PROJECT 3D POINTS TO 2D
1164  //***********************
1165  failure= perspective_projection(_drawingMat, _perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, _uv)!=0;
1166  if(failure)
1167  {
1168  yWarning("I had troubles projecting the points.");
1169  }
1170 
1171  //****
1172  //Draw
1173  //****
1174  int conta,cippa,lippa,uPosition,vPosition;
1175  meanU=0;
1176  meanV=0;
1177  for(conta=0;conta<nPixels;conta++)
1178  {
1179  meanV=meanV+((float*)(_uv->data.ptr + _uv->step*1))[conta];
1180  meanU=meanU+((float*)(_uv->data.ptr + _uv->step*0))[conta];
1181 
1182  for(lippa=-2;lippa<3;lippa++)
1183  for(cippa=-2;cippa<3;cippa++)
1184  {
1185  vPosition= (int)(((float*)(_uv->data.ptr + _uv->step*1))[conta])+lippa-1;
1186  uPosition= (int)(((float*)(_uv->data.ptr + _uv->step*0))[conta])+cippa-1;
1187 
1188  if((uPosition<_rawImage->width)&&(uPosition>=0)&&(vPosition<_rawImage->height)&&(vPosition>=0))
1189  {
1190  image->pixel(uPosition,vPosition)= PixelRgb(B,G,R);
1191  }
1192 
1193  }
1194  }
1195 
1196  meanU=floor(meanU/nPixels);
1197  meanV=floor(meanV/nPixels);
1198  if((meanU<_rawImage->width)&&(meanU>=0)&&(meanV<_rawImage->height)&&(meanV>=0))
1199  {
1200  image->pixel((int)meanU,(int)meanV)= PixelRgb(B,G,R);
1201  }
1202 
1203 }
1204 
1205 bool PF3DTracker::computeTemplateHistogram(string imageFileName,string dataFileName)
1206 {
1207  int u,v,a,b,c;
1208  float usedPoints=0;
1209  //float histogram[YBins][UBins][VBins];
1210  //float* histogram;
1211  //histogram = new float[YBins*UBins*VBins]
1212  int dimensions;
1213  dimensions=3;
1214  int sizes[3]={YBins,UBins,VBins};
1215  //create histogram and allocate memory for it.
1216  CvMatND* histogram=cvCreateMatND(dimensions, sizes, CV_32FC1);
1217  if(histogram==0)
1218  {
1219  yWarning("computeTemplateHistogram: I wasn\'t able to allocate memory for histogram.");
1220  return true; //if I can't do it, I just quit the program.
1221  }
1222  //set content of the matrix to zero.
1223  cvSetZero(histogram);
1224  //load the image
1225  auto rawImage = cv::imread(imageFileName);
1226  if( ! rawImage.data) //load the image from file.
1227  {
1228  yWarning("I wasn't able to open the image file!");
1229  return true; //if I can't do it, I just quit the program.
1230  }
1231  cv::Mat transformedImage(rawImage.rows, rawImage.cols, CV_8UC3);
1232 
1233 
1234  //allocate space for the transformed image
1235 
1236  //transform the image in the YUV format
1237  rgbToYuvBinMatLut(rawImage,transformedImage,_lut);
1238 
1239  //count the frequencies of colour bins, build the histogram.
1240  for(v=0;v<rawImage.rows;v++)
1241  for(u=0;u<rawImage.cols;u++)
1242  {
1243  //discard white pixels [255,255,255].
1244 
1245  if(!(
1246  (((uchar*)(rawImage.data + rawImage.step*v))[u*3+0])==255 && (((uchar*)(rawImage.data + rawImage.step*v))[u*3+1])==255 && (((uchar*)(rawImage.data + rawImage.step*v))[u*3+2])==255)
1247 
1248  )
1249  {
1250 
1251  a=(((uchar*)(transformedImage.data + transformedImage.step*v))[u*3+0]);//Y bin
1252  b=(((uchar*)(transformedImage.data + transformedImage.step*v))[u*3+1]);//U bin
1253  c=(((uchar*)(transformedImage.data + transformedImage.step*v))[u*3+2]);//V bin
1254 
1255  //TEST printf("histogram->size[0].step,%d\n",histogram->dim[0].step); 256
1256  //TEST printf("histogram->size[1].step,%d\n",histogram->dim[1].step); 32
1257  //TEST printf("histogram->size[2].step,%d\n",histogram->dim[2].step); 4
1258  *((float*)(histogram->data.ptr + a*histogram->dim[0].step + b*histogram->dim[1].step + c*histogram->dim[2].step)) +=1;
1259 
1260  //initial pointer + Y*UBINS*VBINS*histogram->step + U*VBINS*histogram->step + V*histogram->step. RIGHT?
1261  //histogram[(yuvBinsImage[u][v][0])*UBins*VBins + (yuvBinsImage[u][v][1])*VBins + (yuvBinsImage[u][v][2]) ]+=1; //increment the correct bin counter.
1262  usedPoints+=1;
1263  }
1264  }
1265 
1266  //normalize
1267  if(usedPoints>0)
1268  {
1269  //histogram=histogram/usedPoints
1270  cvConvertScale( histogram, histogram, 1/usedPoints, 0 );
1271  }
1272 
1273  //write the computed histogram to a file.
1274  ofstream fout(dataFileName.c_str());//open file
1275  if(!fout) //confirm file opened
1276  {
1277  yWarning("computeTemplateHistogram: unable to open the csv file to store the histogram.");
1278  return true;
1279  }
1280  else
1281  {
1282  for(a=0;a<YBins;a++)
1283  {
1284  for(b=0;b<UBins;b++)
1285  {
1286  for(c=0;c<VBins;c++)
1287  {
1288  fout<<*((float*)(histogram->data.ptr + a*histogram->dim[0].step + b*histogram->dim[1].step + c*histogram->dim[2].step))<<endl;
1289  }
1290  }
1291  }
1292  fout.close();
1293  }
1294 
1295  //clean memory up
1296  if (histogram != NULL)
1297  cvReleaseMatND(&histogram);
1298 
1299  return false;
1300 
1301 }
1302 
1303 bool PF3DTracker::readModelHistogram(CvMatND* histogram,const char fileName[])
1304 {
1305  int c1,c2,c3;
1306  char line[15];
1307 
1308  ifstream fin(fileName); //open file
1309  if(!fin) //confirm file opened
1310  {
1311  yWarning("unable to open the csv histogram file.");
1312  return true;
1313  }
1314  else
1315  {
1316  for(c1=0;c1<YBins;c1++)
1317  for(c2=0;c2<UBins;c2++)
1318  for(c3=0;c3<VBins;c3++)
1319  {
1320  fin.getline(line, 14);
1321  *((float*)(histogram->data.ptr + c1*histogram->dim[0].step + c2*histogram->dim[1].step + c3*histogram->dim[2].step))=(float)atof(line);
1322  }
1323  return false;
1324  }
1325 }
1326 
1327 bool PF3DTracker::readInitialmodel3dPoints(CvMat* points, string fileName)
1328 {
1329  int c1,c2;
1330  char line[15];
1331 
1332  ifstream fin(fileName.c_str()); //open file
1333  if(!fin) //confirm file opened
1334  {
1335  yWarning("unable to open the the 3D model file.");
1336  return true;
1337  }
1338  else
1339  {
1340  for(c1=0;c1<3;c1++)
1341  for(c2=0;c2<2*nPixels;c2++)
1342  {
1343  fin.getline(line, 14);
1344  ((float*)(points->data.ptr + points->step*c1))[c2]=(float)atof(line);
1345  }
1346  return false;
1347  }
1348 }
1349 
1350 bool PF3DTracker::readMotionModelMatrix(CvMat* points, string fileName)
1351 {
1352  int c1,c2;
1353  char line[15];
1354 
1355  ifstream fin(fileName.c_str());//open file
1356  if(!fin) //confirm file opened
1357  {
1358  yWarning("unable to open the motion model file.");
1359  return true;
1360  }
1361  else
1362  {
1363  for(c1=0;c1<7;c1++)
1364  for(c2=0;c2<7;c2++)
1365  {
1366  fin.getline(line, 14);
1367  cvmSet(points,c1,c2,atof(line));
1368  }
1369  return false;
1370  }
1371 }
1372 
//Compute the likelihood that the ball is at (x,y,z) (millimeters, camera frame):
//the model points are rototranslated to (x,y,z), projected in perspective, and the
//colour histograms sampled inside/outside the projected contour are compared with
//the model histogram. The result is returned through "likelihood"; the function
//itself always returns false.
//NOTE(review): the parameters modelHistogramMat, fx, fy, u0 and v0 are currently
//ignored; the members _modelHistogramMat, _perspectiveFx, _perspectiveFy,
//_perspectiveCx and _perspectiveCy are used instead.
//NOTE(review): transformedImage is presumably the YUV-binned image produced by
//rgbToYuvBinImageLut — confirm against the caller.
bool PF3DTracker::evaluateHypothesisPerspective(CvMat* model3dPointsMat,float x, float y, float z, CvMatND* modelHistogramMat, IplImage* transformedImage, float fx, float fy, float u0, float v0, float inside_outside, float &likelihood)
{

    bool failure;
    float usedOuterPoints, usedInnerPoints;

    //create a copy of the 3D original points.
    cvCopy(model3dPointsMat,_drawingMat);

    //****************************
    //ROTOTRANSLATE THE 3D POINTS.
    //****************************
    //NOTE(review): this return value is overwritten below without being checked.
    failure=place3dPointsPerspective(_drawingMat,x,y,z);

    //***********************
    //PROJECT 3D POINTS TO 2D
    //***********************
    failure= perspective_projection(_drawingMat, _perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, _uv)!=0;
    if(failure)
    {
        yWarning("I had troubles projecting the points.");
    }

    //sample the colour histograms inside and outside the projected contour.
    computeHistogram(_uv, transformedImage, _innerHistogramMat, usedInnerPoints, _outerHistogramMat, usedOuterPoints);
    //if((usedInnerPoints<nPixels)||(usedOuterPoints<nPixels))
    //    likelihood=0;
    //else
    failure=calculateLikelihood(_modelHistogramMat, _innerHistogramMat,_outerHistogramMat, inside_outside,likelihood);

    likelihood=exp(20*likelihood); //no need to divide: I'm normalizing later.

    //make hypotheses with pixels outside the image less likely:
    //scale by the squared fraction of usable inner and outer points.
    likelihood=likelihood*((float)usedInnerPoints/nPixels)*((float)usedInnerPoints/nPixels)*((float)usedOuterPoints/nPixels)*((float)usedOuterPoints/nPixels);

    return false;
}
1409 
//Same role as evaluateHypothesisPerspective, but the inner/outer colour histograms
//are computed directly from the RGB image (computeHistogramFromRgbImage) instead of
//a pre-transformed image. The result is returned through "likelihood"; the function
//itself always returns false.
//NOTE(review): the parameters modelHistogramMat, fx, fy, u0 and v0 are currently
//ignored; the members _modelHistogramMat, _perspectiveFx, _perspectiveFy,
//_perspectiveCx and _perspectiveCy are used instead.
bool PF3DTracker::evaluateHypothesisPerspectiveFromRgbImage(CvMat* model3dPointsMat,float x, float y, float z, CvMatND* modelHistogramMat, IplImage *image, float fx, float fy, float u0, float v0, float inside_outside, float &likelihood)
{
//TODO

    bool failure;
    float usedOuterPoints, usedInnerPoints;

    //create a copy of the 3D original points.
    cvCopy(model3dPointsMat,_drawingMat);

    //****************************
    //ROTOTRANSLATE THE 3D POINTS.
    //****************************
    //NOTE(review): this return value is overwritten below without being checked.
    failure=place3dPointsPerspective(_drawingMat,x,y,z);

    //***********************
    //PROJECT 3D POINTS TO 2D
    //***********************
    failure= perspective_projection(_drawingMat, _perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, _uv)!=0;
    if(failure)
    {
        yWarning("I had troubles projecting the points.");
    }

    //sample the colour histograms inside and outside the projected contour,
    //straight from the RGB image.
    computeHistogramFromRgbImage(_uv, image, _innerHistogramMat, usedInnerPoints, _outerHistogramMat, usedOuterPoints);
    //if((usedInnerPoints<nPixels)||(usedOuterPoints<nPixels))
    //    likelihood=0;
    //else
    failure=calculateLikelihood(_modelHistogramMat, _innerHistogramMat, _outerHistogramMat, inside_outside,likelihood);

    likelihood=exp(20*likelihood); //no need to divide: I'm normalizing later.

    //make hypotheses with pixels outside the image less likely:
    //scale by the squared fraction of usable inner and outer points.
    likelihood=likelihood*((float)usedInnerPoints/nPixels)*((float)usedInnerPoints/nPixels)*((float)usedOuterPoints/nPixels)*((float)usedOuterPoints/nPixels);

    return false;
}
1447 
//Systematic (low-variance) resampling of the particle set.
//oldParticlesState:   one particle per column; rows 0-5 hold the state, row 6 the weight.
//oldParticlesWeights: row vector of particle weights; NORMALIZED IN PLACE here.
//newParticlesState:   receives the resampled particles.
//cumWeight:           scratch row vector of cumulative weights — assumed to hold
//                     _nParticles+1 entries (TODO confirm allocation at creation site).
//Only (_nParticles - _numParticlesReceived) particles are generated here; the
//remaining columns are overwritten elsewhere with top-down particles received
//from the input port. Always returns false.
bool PF3DTracker::systematic_resampling(CvMat* oldParticlesState, CvMat* oldParticlesWeights, CvMat* newParticlesState, CvMat* cumWeight)
{
    //function [newParticlesState] = systematic_resampling(oldParticlesWeight, oldParticlesState)

    double u; //random number [0,1)
    double sum;
    int c1;
    int rIndex;  //index of the randomized array
    int cIndex;  //index of the cumulative weight array. cIndex-1 indicates which particle we think of resampling.
    int npIndex; //new particle index, tells me how many particles have been created so far.
    int numParticlesToGenerate = _nParticles - _numParticlesReceived; //martim

    //%N is the number of particles.
    //[lines, N] = size(oldParticlesWeight);
    //in CPP, _nParticles is the number of particles.

    //NORMALIZE THE WEIGHTS, so that sum(oldParticlesWeights)=1.
    sum=0;
    for(c1=0;c1<_nParticles;c1++)
    {
        sum+=((float*)(oldParticlesWeights->data.ptr + oldParticlesWeights->step*0))[c1];
    }
    for(c1=0;c1<_nParticles;c1++)
    {
        ((float*)(oldParticlesWeights->data.ptr + oldParticlesWeights->step*0))[c1] = (((float*)(oldParticlesWeights->data.ptr + oldParticlesWeights->step*0))[c1])/(float)sum;
    }

    //GENERATE THE RANDOM OFFSET
    //u = rand(1)/N; %random value [0,1/N)
    u=1/(double)numParticlesToGenerate*((double)rand()/(double)RAND_MAX); //martim

    //the randomized values are going to be u, u+1/N, u+2/N, etc.
    //instead of accessing this vector, the elements are computed on the fly:
    //randomVector(a)= (a-1)/N+u.

    //COMPUTE THE ARRAY OF CUMULATIVE WEIGHTS: cumWeight[k+1]=cumWeight[k]+w[k].
    ((float*)(cumWeight->data.ptr))[0]=0;
    for(c1=0;c1<_nParticles;c1++)
    {
        ((float*)(cumWeight->data.ptr))[c1+1]=((float*)(cumWeight->data.ptr))[c1]+((float*)(oldParticlesWeights->data.ptr + oldParticlesWeights->step*0))[c1];
    }
    //CHECK IF THERE IS SOME ROUNDING ERROR AT THE END OF THE ARRAY:
    //the last cumulative weight must be exactly 1, otherwise the selection loop
    //below could fail to terminate within the array bounds.
    if(((float*)(cumWeight->data.ptr))[_nParticles]!=1)
    {
        ((float*)(cumWeight->data.ptr))[_nParticles]=1;
        if( ((float*)(cumWeight->data.ptr))[_nParticles]!=1)
        {
            //still different (should not happen).
        }
        else
        {
            //now it's ok.
        }
    }

    //PERFORM THE ACTUAL RESAMPLING
    rIndex=0;  //index of the randomized array
    cIndex=1;  //index of the cumulative weight array. cIndex-1 indicates which particle we think of resampling.
    npIndex=0; //new particle index, tells me how many particles have been created so far.

    while(npIndex < numParticlesToGenerate) //martim
    {
        //are we sure it must be >=? ??? !!! WARNING  [translated from Italian]
        if(((float*)(cumWeight->data.ptr))[cIndex]>=(double)rIndex/(double)numParticlesToGenerate+u) //martim
        {
            //particle cIndex-1 is selected: replicate its six state rows into the
            //new set and reset its weight (row 6) to zero.
            ((float*)(newParticlesState->data.ptr + newParticlesState->step*0))[npIndex]=((float*)(oldParticlesState->data.ptr + oldParticlesState->step*0))[cIndex-1];
            ((float*)(newParticlesState->data.ptr + newParticlesState->step*1))[npIndex]=((float*)(oldParticlesState->data.ptr + oldParticlesState->step*1))[cIndex-1];
            ((float*)(newParticlesState->data.ptr + newParticlesState->step*2))[npIndex]=((float*)(oldParticlesState->data.ptr + oldParticlesState->step*2))[cIndex-1];
            ((float*)(newParticlesState->data.ptr + newParticlesState->step*3))[npIndex]=((float*)(oldParticlesState->data.ptr + oldParticlesState->step*3))[cIndex-1];
            ((float*)(newParticlesState->data.ptr + newParticlesState->step*4))[npIndex]=((float*)(oldParticlesState->data.ptr + oldParticlesState->step*4))[cIndex-1];
            ((float*)(newParticlesState->data.ptr + newParticlesState->step*5))[npIndex]=((float*)(oldParticlesState->data.ptr + oldParticlesState->step*5))[cIndex-1];
            ((float*)(newParticlesState->data.ptr + newParticlesState->step*6))[npIndex]=0; //initializing weight
            rIndex=rIndex+1;
            npIndex=npIndex+1;
        }
        else
        {
            //the cumulative weight is still below the current threshold:
            //move on to the next candidate particle.
            cIndex=cIndex+1;
        }
    }

    return false;
}
1548 
//Alternative resampling routine, apparently following De Freitas' matlab code
//(see comment inside). NOTE(review): this function appears unused — the call site
//invokes systematic_resampling instead, and its comment reports that this one
//sometimes produces corrupted particles.
//inState/outState: one particle per column, rows 0-5 hold the state.
//weights: particle weights; MODIFIED IN PLACE by the selection loop.
//Always returns false.
bool PF3DTracker::systematicR(CvMat* inState, CvMat* weights, CvMat* outState)
{
    float N=(float)_nParticles;
    float s=1.0F/N;

    //_nChildren[i] will count how many copies of particle i survive.
    cvZero(_nChildren);

    cvCopy(_ramp,_label); //label should be like: 1,2,3,4,5,6...

    float auxw=0;
    int li=0; //label of the current point

    //initialization
    float T;
    T=s*((float)rand()/(float)RAND_MAX); //random number between 0 and s.

    int j=1;
    float Q=0;
    int i=0;

    //_u: vector of uniform random numbers in [0,1).
    cvRandArr( &rngState, _u, CV_RAND_UNI, cvScalar(0), cvScalar(1));

    //selection loop: decides how many children each particle gets.
    while((T<1) && (i<_nParticles)) //the second part of the condition is a hack:
                                    // the loop sometimes doesn't stop without it.
    {
        if((Q>T) && (i<_nParticles))//the second part of the condition is a hack:
                                    // the loop sometimes doesn't stop without it.
        {
            T=T+s;

            ((float*)(_nChildren->data.ptr))[li-1]+=1;
        }
        else //the first time it passes by here.
        {
            i=(int)(floor((N-j+1)*(((float*)(_u->data.ptr))[j-1]))+j); //De Freitas uses "fix" in matlab... I guess floor should do, in this case. MAYBE NOT?

            auxw=((float*)(weights->data.ptr))[i-1];

            li=(int)((float*)(_label->data.ptr))[i-1]; //THERE IS SOMETHING WRONG HERE. [translated from Italian]
            Q=Q+auxw;

            ((float*)(weights->data.ptr))[i-1]=((float*)(weights->data.ptr))[j-1];
            ((float*)(_label->data.ptr))[i-1] = ((float*)(_label->data.ptr))[j-1];
            j=j+1;
        }
    }

    //COPY STUFF: replicate each particle's state (rows 0-5) into outState,
    //once per child assigned to it above.
    int index=1;
    for(i=0;i<N;i++)
    {
        if(((float*)(_nChildren->data.ptr))[i]>0)
        {
            for(j=index;(j<index+((float*)(_nChildren->data.ptr))[i])&&(j<_nParticles+1);j++) //WARNING: ??? !!! I MIGHT WELL HAVE MESSED SOMETHING UP HERE.
            {
                ((float*)(outState->data.ptr + outState->step*0))[j-1]=((float*)(inState->data.ptr + inState->step*0))[i];
                ((float*)(outState->data.ptr + outState->step*1))[j-1]=((float*)(inState->data.ptr + inState->step*1))[i];
                ((float*)(outState->data.ptr + outState->step*2))[j-1]=((float*)(inState->data.ptr + inState->step*2))[i];
                ((float*)(outState->data.ptr + outState->step*3))[j-1]=((float*)(inState->data.ptr + inState->step*3))[i];
                ((float*)(outState->data.ptr + outState->step*4))[j-1]=((float*)(inState->data.ptr + inState->step*4))[i];
                ((float*)(outState->data.ptr + outState->step*5))[j-1]=((float*)(inState->data.ptr + inState->step*5))[i];
            }
        }
        index=index+(int)((float*)(_nChildren->data.ptr))[i];
    }

    return false;
}
1617 
1618 bool PF3DTracker::place3dPointsPerspective(CvMat* points, float x, float y, float z)
1619 {
1620  //*********************
1621  // 0. Prepare some data
1622  //*********************
1623  float floorDistance=sqrt(x*x+y*y); //horizontal distance from the optical center to the ball
1624  float distance=sqrt(x*x+y*y+z*z); //distance from the optical center to the ball
1625 
1626  float cosAlpha=floorDistance/distance; //cosine of an angle needed for a rotation
1627  float sinAlpha=-z/distance; //sine of an angle needed for a rotation
1628  float cosBeta=x/floorDistance; //cosine of an angle needed for a rotation
1629  float sinBeta=y/floorDistance; //sine of an angle needed for a rotation
1630 
1631  //Rotation matrix Rz: [3 x 3]
1632  ((float*)(_rzMat->data.ptr + _rzMat->step*0))[0]= cosBeta;
1633  ((float*)(_rzMat->data.ptr + _rzMat->step*0))[1]= -sinBeta;
1634  ((float*)(_rzMat->data.ptr + _rzMat->step*0))[2]= 0;
1635  ((float*)(_rzMat->data.ptr + _rzMat->step*1))[0]= sinBeta;
1636  ((float*)(_rzMat->data.ptr + _rzMat->step*1))[1]= cosBeta;
1637  ((float*)(_rzMat->data.ptr + _rzMat->step*1))[2]= 0;
1638  ((float*)(_rzMat->data.ptr + _rzMat->step*2))[0]= 0;
1639  ((float*)(_rzMat->data.ptr + _rzMat->step*2))[1]= 0;
1640  ((float*)(_rzMat->data.ptr + _rzMat->step*2))[2]= 1;
1641 
1642  //Rotation matrix Ry: [3 x 3]
1643  ((float*)(_ryMat->data.ptr + _ryMat->step*0))[0]= cosAlpha;
1644  ((float*)(_ryMat->data.ptr + _ryMat->step*0))[1]= 0;
1645  ((float*)(_ryMat->data.ptr + _ryMat->step*0))[2]= sinAlpha;
1646  ((float*)(_ryMat->data.ptr + _ryMat->step*1))[0]= 0;
1647  ((float*)(_ryMat->data.ptr + _ryMat->step*1))[1]= 1;
1648  ((float*)(_ryMat->data.ptr + _ryMat->step*1))[2]= 0;
1649  ((float*)(_ryMat->data.ptr + _ryMat->step*2))[0]= -sinAlpha;
1650  ((float*)(_ryMat->data.ptr + _ryMat->step*2))[1]= 0;
1651  ((float*)(_ryMat->data.ptr + _ryMat->step*2))[2]= cosAlpha;
1652 
1653  //***********************************
1654  // 1. Rotate points around the Y axis
1655  //***********************************
1656  //Multiply Ry by points
1657  //_points2Mat=_ryMat*points [3 x 2*nPixels]
1658  cvMatMul(_ryMat,points,_points2Mat);
1659 
1660  //*****************************************
1661  // 2. Apply a vertical and horizontal shift
1662  //*****************************************
1663  //sum floorDistance to all the elements in the first row of "points2".
1664  cvSet(_tempMat1,cvScalar(floorDistance)); //set all elements of _tempMat1 to the value of "floorDistance"
1665  cvAdd(_p2Mat1,_tempMat1,_p2Mat1); //_p2Mat1=_p2Mat1+_tempMat1.
1666 
1667  //sum z to the third row of "points2".
1668  cvSet(_tempMat3,cvScalar(z)); //set all elements of _tempMat3 to the value of "z"
1669  cvAdd(_p2Mat3,_tempMat3,_p2Mat3); //_p2Mat3=_p2Mat3+_tempMat3.
1670 
1671  //**********************************
1672  //3. Rotate points around the Z axis
1673  //**********************************
1674  //Multiply RZ by "points2", put the result in "points"
1675  //points=_rzMat*_points2Mat [3 x 2*nPixels]
1676  cvMatMul(_rzMat,_points2Mat,points);
1677 
1678  return false;
1679 }
1680 
1681 
1682 int PF3DTracker::perspective_projection(CvMat* xyz, float fx, float fy, float cx, float cy, CvMat* uv)
1683 {
1684  //fill the projection matrix with the current values.
1685  ((float*)(_projectionMat->data.ptr + _projectionMat->step*0))[0]= fx;
1686  ((float*)(_projectionMat->data.ptr + _projectionMat->step*0))[1]= 0;
1687  ((float*)(_projectionMat->data.ptr + _projectionMat->step*0))[2]= cx;
1688  ((float*)(_projectionMat->data.ptr + _projectionMat->step*1))[0]= 0;
1689  ((float*)(_projectionMat->data.ptr + _projectionMat->step*1))[1]= fy;
1690  ((float*)(_projectionMat->data.ptr + _projectionMat->step*1))[2]= cy;
1691 
1692 // int a,b;
1693 // for(a=0;a<2;a++)
1694 // {
1695 // cout<<"LINE ";
1696 // for(b=0;b<3;b++)
1697 // {
1698 // cout<<((float*)(_projectionMat->data.ptr + _projectionMat->step*a))[b]<<",";
1699 // }
1700 // cout<<"\n";
1701 // }
1702 // cout<<"\n";
1703 
1704 // for(a=0;a<3;a++)
1705 // {
1706 // cout<<"LINE ";
1707 // for(b=0;b<2*nPixels;b++)
1708 // {
1709 // cout<<((float*)(xyz->data.ptr + xyz->step*a))[b]<<",";
1710 // }
1711 // cout<<"\n";
1712 // }
1713 // cout<<"\n";
1714 
1715  //#####################################################
1716  //For every column of XYZ, divide each element by Z(i).
1717  //#####################################################
1718  //setup
1719 
1720  cvInitMatHeader( _xyzMat1, 1, 2*nPixels, CV_32FC1, xyz->data.ptr );
1721  cvInitMatHeader( _xyzMat2, 1, 2*nPixels, CV_32FC1, xyz->data.ptr + xyz->step*1);
1722  cvInitMatHeader( _xyzMat3, 1, 2*nPixels, CV_32FC1, xyz->data.ptr + xyz->step*2);
1723 
1724  //divide X (the first line of xyz) by Z (the third line of xyz).
1725  cvDiv( _xyzMat1, _xyzMat3, _xyzMat1, 1 );
1726 
1727 // for(a=0;a<3;a++)
1728 // {
1729 // cout<<"LINE ";
1730 // for(b=0;b<2*nPixels;b++)
1731 // {
1732 // cout<<((float*)(xyz->data.ptr + xyz->step*a))[b]<<",";
1733 // }
1734 // cout<<"\n";
1735 // }
1736 // cout<<"\n";
1737 
1738  //divide Y (the second line of xyz) by Z (the third line of xyz).
1739  cvDiv( _xyzMat2, _xyzMat3, _xyzMat2, 1 );
1740 
1741 // for(a=0;a<3;a++)
1742 // {
1743 // cout<<"LINE ";
1744 // for(b=0;b<2*nPixels;b++)
1745 // {
1746 // cout<<((float*)(xyz->data.ptr + xyz->step*a))[b]<<",";
1747 // }
1748 // cout<<"\n";
1749 // }
1750 // cout<<"\n";
1751 
1752  //set all elements of Z to 1.
1753  cvSet(_xyzMat3,(cvScalar(1)));
1754 
1755 // for(a=0;a<3;a++)
1756 // {
1757 // cout<<"LINE ";
1758 // for(b=0;b<2*nPixels;b++)
1759 // {
1760 // cout<<((float*)(xyz->data.ptr + xyz->step*a))[b]<<",";
1761 // }
1762 // cout<<"\n";
1763 // }
1764 // cout<<"\n";
1765 
1766  //#########################
1767  //UV=projectionMat*(XYZ/Z).
1768  //#########################
1769  cvMatMul(_projectionMat,xyz,uv);
1770 
1771 /* for(a=0;a<2;a++)
1772  {
1773  cout<<"LINE ";
1774  for(b=0;b<2*nPixels;b++)
1775  {
1776  cout<<((float*)(_uv->data.ptr + _uv->step*a))[b]<<",";
1777  }
1778  cout<<"\n";
1779  }
1780  cout<<"\n";*/
1781  return 0;
1782 
1783 }
1784 
1785 bool PF3DTracker::computeHistogram(CvMat* uv, IplImage* transformedImage, CvMatND* innerHistogramMat, float &usedInnerPoints, CvMatND* outerHistogramMat, float &usedOuterPoints)
1786 {
1787  int count;
1788  int u, v, a, b, c;
1789 
1790  usedInnerPoints=0;
1791  cvSetZero(innerHistogramMat);
1792 
1793  for(count=0;count<nPixels;count++)
1794  {
1795  u=(int)((float*)(uv->data.ptr + uv->step*0))[count]; //truncating ??? !!! warning
1796  v=(int)((float*)(uv->data.ptr + uv->step*1))[count]; //truncating ??? !!! warning
1797  if((v<transformedImage->height)&&(v>=0)&&(u<transformedImage->width)&&(u>=0))
1798  {
1799  a=(((uchar*)(transformedImage->imageData + transformedImage->widthStep*v))[u*3+0]);//Y bin
1800  b=(((uchar*)(transformedImage->imageData + transformedImage->widthStep*v))[u*3+1]);//U bin
1801  c=(((uchar*)(transformedImage->imageData + transformedImage->widthStep*v))[u*3+2]);//V bin
1802  *((float*)(innerHistogramMat->data.ptr + a*innerHistogramMat->dim[0].step + b*innerHistogramMat->dim[1].step + c*innerHistogramMat->dim[2].step)) +=1;
1803  usedInnerPoints+=1;
1804  }
1805  }
1806 
1807  if(usedInnerPoints>0)
1808  {
1809  cvConvertScale(innerHistogramMat, innerHistogramMat, 1/usedInnerPoints, 0 );
1810  }
1811 
1812  usedOuterPoints=0;
1813  cvSetZero(outerHistogramMat);
1814  for(count=nPixels;count<2*nPixels;count++)
1815  {
1816  u=(int)((float*)(uv->data.ptr + uv->step*0))[count]; //truncating ??? !!! warning
1817  v=(int)((float*)(uv->data.ptr + uv->step*1))[count]; //truncating ??? !!! warning
1818  if((v<transformedImage->height)&&(v>=0)&&(u<transformedImage->width)&&(u>=0))
1819  {
1820  a=(((uchar*)(transformedImage->imageData + transformedImage->widthStep*v))[u*3+0]);//Y bin
1821  b=(((uchar*)(transformedImage->imageData + transformedImage->widthStep*v))[u*3+1]);//U bin
1822  c=(((uchar*)(transformedImage->imageData + transformedImage->widthStep*v))[u*3+2]);//V bin
1823  *((float*)(outerHistogramMat->data.ptr + a*outerHistogramMat->dim[0].step + b*outerHistogramMat->dim[1].step + c*outerHistogramMat->dim[2].step)) +=1;
1824  usedOuterPoints+=1;
1825  }
1826  }
1827  if(usedOuterPoints>0)
1828  {
1829  cvConvertScale(outerHistogramMat, outerHistogramMat, 1/usedOuterPoints, 0 );
1830  }
1831 
1832  return false;
1833 }
1834 
1835 bool PF3DTracker::computeHistogramFromRgbImage(CvMat* uv, IplImage *image, CvMatND* innerHistogramMat, float &usedInnerPoints, CvMatND* outerHistogramMat, float &usedOuterPoints)
1836 {
1837  int count;
1838  int u,v;
1839  int r,g,b;
1840  int index;
1841 
1843  //INNER/
1845  usedInnerPoints=0;
1846  cvZero(innerHistogramMat);
1847  for(count=0;count<nPixels;count++)
1848  {
1849  u=(int)((float*)(uv->data.ptr + uv->step*0))[count]; //truncating ??? !!! warning
1850  v=(int)((float*)(uv->data.ptr + uv->step*1))[count]; //truncating ??? !!! warning
1851  if((v<image->height)&&(v>=0)&&(u<image->width)&&(u>=0))
1852  {
1853  //transform the color from RGB to HSI bin.
1854  r=(((uchar*)(image->imageData + image->widthStep*v))[u*3+0]);
1855  g=(((uchar*)(image->imageData + image->widthStep*v))[u*3+1]);
1856  b=(((uchar*)(image->imageData + image->widthStep*v))[u*3+2]);
1857  index=r*65536+g*256+b;
1858  //increase the bin counter
1859  *((float*)(innerHistogramMat->data.ptr + _lut[index].y*innerHistogramMat->dim[0].step + _lut[index].u*innerHistogramMat->dim[1].step + _lut[index].v*innerHistogramMat->dim[2].step)) +=1;
1860  usedInnerPoints+=1;
1861  }
1862  }
1863 
1864  //cout<<"inner points="<<usedInnerPoints<<endl;
1865  if(usedInnerPoints>0)
1866  {
1867  cvConvertScale( innerHistogramMat, innerHistogramMat, 1/usedInnerPoints, 0 );
1868  }
1869 
1871  //OUTER/
1873  usedOuterPoints=0;
1874  cvZero(outerHistogramMat);
1875  for(count=nPixels;count<2*nPixels;count++)
1876  {
1877  u=(int)((float*)(uv->data.ptr + uv->step*0))[count]; //truncating ??? !!! warning
1878  v=(int)((float*)(uv->data.ptr + uv->step*1))[count]; //truncating ??? !!! warning
1879  if((v<image->height)&&(v>=0)&&(u<image->width)&&(u>=0))
1880  {
1881  //transform the color from RGB to HSI bin.
1882  r=(((uchar*)(image->imageData + image->widthStep*v))[u*3+0]);
1883  g=(((uchar*)(image->imageData + image->widthStep*v))[u*3+1]);
1884  b=(((uchar*)(image->imageData + image->widthStep*v))[u*3+2]);
1885  index=r*65536+g*256+b;
1886  //increase the bin counter
1887  *((float*)(outerHistogramMat->data.ptr + _lut[index].y*outerHistogramMat->dim[0].step + _lut[index].u*outerHistogramMat->dim[1].step + _lut[index].v*outerHistogramMat->dim[2].step)) +=1;
1888  usedOuterPoints+=1;
1889  }
1890  }
1891  if(usedOuterPoints>0)
1892  {
1893  cvConvertScale( outerHistogramMat, outerHistogramMat, 1/usedOuterPoints, 0 );
1894  }
1895 
1896  return false;
1897 }
1898 
1899 bool PF3DTracker::calculateLikelihood(CvMatND* templateHistogramMat, CvMatND* innerHistogramMat, CvMatND* outerHistogramMat, float inside_outside, float &likelihood)
1900 {
1901  likelihood=0;
1902  int a,b,c;
1903  for(a=0;a<YBins;a++)
1904  for(b=0;b<UBins;b++)
1905  for(c=0;c<VBins;c++)
1906  {
1907  likelihood=likelihood + sqrt(
1908  *((float*)(innerHistogramMat->data.ptr + a*innerHistogramMat->dim[0].step + b*innerHistogramMat->dim[1].step + c*innerHistogramMat->dim[2].step)) *
1909  *((float*)(templateHistogramMat->data.ptr + a*templateHistogramMat->dim[0].step + b*templateHistogramMat->dim[1].step + c*templateHistogramMat->dim[2].step)))
1910  - _inside_outside_difference_weight*sqrt(
1911  *((float*)(outerHistogramMat->data.ptr + a*outerHistogramMat->dim[0].step + b*outerHistogramMat->dim[1].step + c*outerHistogramMat->dim[2].step)) *
1912  *((float*)(innerHistogramMat->data.ptr + a*innerHistogramMat->dim[0].step + b*innerHistogramMat->dim[1].step + c*innerHistogramMat->dim[2].step)));
1913 
1914  }
1915  likelihood=(likelihood+_inside_outside_difference_weight)/(1+_inside_outside_difference_weight);
1916  if(likelihood<0)
1917  yWarning("LIKELIHOOD<0!!!");
1918  return false;
1919 }
1920 
1921 bool PF3DTracker::testOpenCv()
1922 {
1923  int type;
1924  bool failure;
1925  type=CV_32FC1; //32 bits, signed, one channel.
1926  CvMat* points;
1927 
1928  points = cvCreateMat( 3, 2*nPixels, type );
1929  readInitialmodel3dPoints(points, "models/initial_ball_points_46mm_30percent.csv");
1930 
1931  failure = place3dPointsPerspective(points,100,200,1000); //Funziona...
1932 
1933  failure = perspective_projection(points, _perspectiveFx, _perspectiveFy, _perspectiveCx, _perspectiveCy, _uv)!=0;
1934 
1935  return true;
1936 
1937 }
1938 
1939 void printMat(CvMat* A)
1940 {
1941  int a,b;
1942  for(a=0;a<A->rows;a++)
1943  {
1944  for(b=0;b<A->cols;b++)
1945  {
1946  cout<<((float*)(A->data.ptr + A->step*a))[b]<<",";
1947  }
1948  cout<<"\n";
1949  }
1950 }
Copyright: (C) 2009 RobotCub Consortium Authors: Matteo Taiana, Ugo Pattacini CopyPolicy: Released un...