stereo-vision
All Data Structures Namespaces Functions Modules Pages
RGBD2PointCloud.cpp
1 #include <cmath>
2 #include <string>
3 #include <iostream>
4 #include <yarp/conf/numeric.h>
5 
6 #include <yarp/os/all.h>
7 #include <yarp/dev/all.h>
8 #include <yarp/sig/all.h>
9 #include <yarp/math/Math.h>
10 #include <yarp/conf/system.h>
11 #include <yarp/sig/Image.h>
12 #include <yarp/sig/Vector.h>
13 
14 #include "RGBD2PointCloud.hpp"
15 #include <yarp/os/NetFloat32.h>
16 
// Wire-format layout of a single point of the ROS sensor_msgs/PointCloud2
// message: three FLOAT32 coordinates followed by 4 bytes of packed color,
// 16 bytes total (enforced by the yAssert in configure()).
// YARP_BEGIN_PACK/YARP_END_PACK disable struct padding so an array of
// PC_Point can be copied straight into the message's raw data buffer.
YARP_BEGIN_PACK
typedef struct {
    yarp::os::NetFloat32 x;
    yarp::os::NetFloat32 y;
    yarp::os::NetFloat32 z;
    char rgba[4];   // packed color; bytes [2],[1],[0] are written as R,G,B by the converter
} PC_Point;
YARP_END_PACK
25 
26 using namespace std;
27 using namespace yarp::os;
28 using namespace yarp::sig;
29 using namespace yarp::dev;
30 using namespace yarp::math;
31 
32 RGBD2PointCloud::RGBD2PointCloud()
33 {
34  use_RGBD_client = false;
35  width = 0;
36  height = 0;
37  rosPC_data.header.seq = 0;
38  frame_id = "/depth_center";
39  scaleFactor = 1;
40  mirrorX = 1;
41  mirrorY = 1;
42  mirrorZ = 1;
43  publishTF = false;
44 }
45 
46 RGBD2PointCloud::~RGBD2PointCloud()
47 {
48 
49 }
50 
51 Bottle RGBD2PointCloud::get_3D_points(const vector<Vector> &pixels, bool color)
52 {
53  int index;
54  Bottle point_cloud;
55  Bottle &points=point_cloud.addList();
56  uint32_t c;
57 
58  for (size_t i=0; i<pixels.size(); i++)
59  {
60  Vector pixel=pixels[i];
61  int u=pixel[0];
62  int v=pixel[1];
63  index=rosPC_data.width*(v) + u;
64 
65  char *pointer = (char*) &rosPC_data.data[0];
66  PC_Point *iter = (PC_Point*) &pointer[index*sizeof(PC_Point)];
67 
68  Bottle &pp=points.addList();
69  pp.addFloat64(iter->x);
70  pp.addFloat64(iter->y);
71  pp.addFloat64(iter->z);
72 
73  if (color)
74  {
75  uint8_t a = iter->rgba[2];
76  pp.addInt32(a);
77  a = iter->rgba[1];
78  pp.addInt32(a);
79  a = iter->rgba[0];
80  pp.addInt32(a);
81  }
82  }
83 
84  return point_cloud;
85 }
86 
// Bind the RPC service interface to the given port so remote clients can
// invoke the module's RPC methods (e.g. get_3D_points) over YARP.
bool RGBD2PointCloud::attach(RpcServer &source)
{
    return this->yarp().attachAsServer(source);
}
91 
/**
 * One-time module setup, called by RFModule before the update loop starts.
 *
 * Reads parameters from the ResourceFinder, opens the ROS node and the
 * PointCloud2 output topic, connects to the remote RGB/depth image ports
 * (either through the RGBDSensorClient device or plain buffered ports),
 * and pre-fills the constant part of the ROS PointCloud2 message
 * (field descriptions, endianness, point size).
 *
 * @param rf configuration source (command line + context files)
 * @return false on missing required parameters or any port/topic failure
 */
bool RGBD2PointCloud::configure(ResourceFinder& rf)
{
    //looking for ROS parameters
    string topicName, nodeName;
    Bottle& rosParam = rf.findGroup("ROS");
//    if(!rosParam.isNull())
    {
        // Topic and node names fall back to fixed defaults when the [ROS]
        // group does not provide them.
        if (rosParam.check("ROS_topicName"))
        {
            topicName = rosParam.find("ROS_topicName").asString();
        }
        else
        {
            topicName = "/RGBD2PointCloud";
        }
        if (rosParam.check("ROS_nodeName"))
        {
            nodeName = rosParam.find("ROS_nodeName").asString();
        }
        else
        {
            nodeName = "/RGBD2PointCloudNode";
        }

    }


    // --help: print usage and abort startup.
    if(rf.check("help"))
    {
        yInfo() << " Required parameters:";
        yInfo() << "\tremoteImagePort: remote port streaming rgb images";
        yInfo() << "\tremoteDepthPort: remote port streaming depth images";
        yInfo() << " Optional parameters:";
        yInfo() << "\tscale: scale factor to apply to depth data. For Gazebo has to be 1, for xtion sensor 0.001 in order to scale data to [m]";
        yInfo() << "\tmirrorX, mirrorY, mirrorZ: add it to mirror the resulting point cloud on the corresponding axes (no param, just '--mirrorx')";
        yInfo() << "\ttf: if present a tf will be published. This requires 7 parameters";
        yInfo() << "\t reference frame name,\n\t x,y,z translations [m], \n\t roll, pitch, yaw rotations [rad]\n";
        yInfo() << "\t For example --tf base_link 0.1 0.0 0.0 1.56 0.0 0.0 -- no parenthesis";
        return false;
    }

    if(rf.check("frame_id"))
    {
        frame_id = rf.find("frame_id").asString();
    }


    // Both remote image ports are mandatory.
    if(!rf.check("remoteImagePort") || !rf.check("remoteDepthPort"))
    {
        yError() << "Missing required parameters: remoteImagePort or remoteDepthPort";
        return false;
    }

    yInfo() << "Node name is " << nodeName;
    yInfo() << "Topic name is " << topicName;

    // Open the ROS node first, then the PointCloud2 publisher on it.
    rosNode = new yarp::os::Node(nodeName);
    if(!pointCloud_outTopic.topic(topicName))
    {
        yError() << " opening " << topicName << " Topic, check your yarp-ROS network configuration\n";
        return false;
    }

    // TBD: This is for debug only. Implement later
//    tf_broadcaster = new tf::TransformBroadcaster;

    std::string remoteImagePort_name = rf.find("remoteImagePort").asString();
    std::string remoteDepthPort_name = rf.find("remoteDepthPort").asString();

    // cannot use because OpenNI2DeviceServer does not uses RGBDSensorWrapper yet
    if(use_RGBD_client)
    {
        // TBD: make ports opened by this device to use custom names
        yarp::os::Property params;
        params.put("device", "RGBDSensorClient");
        params.put("localImagePort","/RGBD2PointCloud/rgb/in:i");
        params.put("localDepthPort","/RGBD2PointCloud/depth/in:i");
        params.put("remoteImagePort",remoteImagePort_name);
        params.put("remoteDepthPort",remoteDepthPort_name);
        params.put("watchdog","100");
        params.put("synchPolicy","latest");

        if(!RGBD_Client.open(params))
        {
            yError() << "Cannot open device";
            return false;
        }
    }
    else
    {
        // Plain buffered ports: open local inputs plus the RPC port, then
        // connect them to the remote streams ourselves.
        bool ret = true;
        ret &= imageFrame_inputPort.open("/RGBD2PointCloud/rgb/in:i");
        ret &= depthFrame_inputPort.open("/RGBD2PointCloud/depth/in:i");
        ret &= rpcPort.open("/RGBD2PointCloud/rpc");

        if(!ret)
        {
            yError() << "Cannot open required ports";
            return false;
        }

        attach(rpcPort);

        // TBD: make ports opened by this device to use custom names
        ret &= yarp::os::Network::connect(remoteImagePort_name, "/RGBD2PointCloud/rgb/in:i");
        ret &= yarp::os::Network::connect(remoteDepthPort_name, "/RGBD2PointCloud/depth/in:i");

        if(!ret)
        {
            yError() << "Cannot connect to remote ports";
            return false;
        }
        else
            yInfo() << "Connection is done!";
    }

    //
    // TBD: This is for debug only. Implement later
    //
//    Bottle tf = rf.findGroup("tf");
//    if(!tf.isNull() )
//    {
//        if(tf.size() != 1+7)
//        {
//            yError() << "TF parameter must have 7 elements: reference frame name, x,y,z [m] translations, roll, pitch, yaw rotations [rad]\n" \
//                        "For example --tf base_link 0.1 0.0 0.0 1.56 0.0 0.0 -- no parenthesis";
//            return false;
//        }
//
//        translation.resize(3);
//        rotation.resize(3);
//        rotation_frame_id = tf.get(1).asString();
//        translation[0] = tf.get(2).asFloat64();
//        translation[1] = tf.get(3).asFloat64();
//        translation[2] = tf.get(4).asFloat64();
//
//        rotation[0] = tf.get(5).asFloat64();
//        rotation[1] = tf.get(6).asFloat64();
//        rotation[2] = tf.get(7).asFloat64();
//    }

    // NOTE(review): a frame_id from the [ROS] group silently overrides one
    // given with --frame_id above — confirm this precedence is intended.
    frame_id = rosParam.check("frame_id", Value(frame_id)).asString();
    // Mirror flags are plain presence switches: present => -1, absent => +1.
    rf.check("mirrorX") ? mirrorX = -1 : mirrorX = 1;
    rf.check("mirrorY") ? mirrorY = -1 : mirrorY = 1;
    rf.check("mirrorZ") ? mirrorZ = -1 : mirrorZ = 1;

    yInfo() << "Mirrors: x=" << mirrorX << " y=" << mirrorY << " z= " << mirrorZ;
    yInfo() << "Frame id: " << frame_id;
    scaleFactor = rf.check("scale", Value(1)).asFloat64();


    // Initialize the ROS pointCloud data with const values (width & height still unknown here)
    rosPC_data.header.seq++;
    rosPC_data.header.frame_id = frame_id;

    // PointCloud2 field table: x,y,z as FLOAT32 at offsets 0/4/8 and a packed
    // "rgb" UINT32 at offset 12 — matching the PC_Point struct layout.
    rosPC_data.fields.resize(4);
    rosPC_data.fields[0].name = "x";
    rosPC_data.fields[0].offset = 0;            // offset in bytes from start of each point
    rosPC_data.fields[0].datatype = 7;          // 7 = FLOAT32
    rosPC_data.fields[0].count = 1;             // how many FLOAT32 used for 'x'

    rosPC_data.fields[1].name = "y";
    rosPC_data.fields[1].offset = 4;            // offset in bytes from start of each point
    rosPC_data.fields[1].datatype = 7;          // 7 = FLOAT32
    rosPC_data.fields[1].count = 1;             // how many FLOAT32 used for 'y'

    rosPC_data.fields[2].name = "z";
    rosPC_data.fields[2].offset = 8;            // offset in bytes from start of each point
    rosPC_data.fields[2].datatype = 7;          // 7 = FLOAT32
    rosPC_data.fields[2].count = 1;             // how many FLOAT32 used for 'z'

    rosPC_data.fields[3].name = "rgb";
    rosPC_data.fields[3].offset = 12;           // offset in bytes from start of each point
    rosPC_data.fields[3].datatype = 6;          // 6 = UINT32
    rosPC_data.fields[3].count = 1;             // how many UINT32 used for 'rgb'

#if defined(YARP_BIG_ENDIAN)
    rosPC_data.is_bigendian = true;
#elif defined(YARP_LITTLE_ENDIAN)
    rosPC_data.is_bigendian = false;
#else
    #error "Cannot detect endianness"
#endif

    // The packed struct must be exactly 16 bytes for the offsets above to hold.
    yAssert(sizeof(PC_Point) == 16);
    rosPC_data.point_step = sizeof(PC_Point);
    rosPC_data.is_dense = true;                 // what this field actually means?? When is it false??
    return true;
}
281 
282 double RGBD2PointCloud::getPeriod()
283 {
284  return 0.033;
285 }
286 
287 bool RGBD2PointCloud::updateModule()
288 {
289  static double start = yarp::os::Time::now();
290 
291  if(yarp::os::Time::now() - start > 3.0f)
292  {
293  yInfo() << "RGBD2PointCloud is running fine.";
294  start = yarp::os::Time::now();
295  }
296  return convertRGBD_2_XYZRGB();
297 }
298 
299 bool RGBD2PointCloud::convertRGBD_2_XYZRGB()
300 {
301  bool ret = imageFrame_inputPort.read(colorImage);
302  ret &= depthFrame_inputPort.read(depthImage);
303 
304  imageFrame_inputPort.getEnvelope(colorStamp);
305  depthFrame_inputPort.getEnvelope(depthStamp);
306 
307  if(!ret)
308  {
309  yError() << "Cannot read from ports";
310  return false;
311  }
312 
313  int d_width = depthImage.width();
314  int d_height = depthImage.height();
315  int c_width = colorImage.width();
316  int c_height = colorImage.height();
317 
318  if( (d_width != c_width) && (d_height != c_height) )
319  {
320  yError() << "Size does not match";
321  return false;
322  }
323 
324  double tmp_sec;
325  width = d_width;
326  height = d_height;
327 
328  rosPC_data.width = width;
329  rosPC_data.height = height;
330  rosPC_data.row_step = rosPC_data.point_step*width;
331 
332  rosPC_data.header.stamp.nsec = (NetUint32) (modf(depthStamp.getTime(), &tmp_sec)*1000000000);
333  rosPC_data.header.stamp.sec = (NetUint32) tmp_sec;
334  int index = 0;
335 
336  unsigned char* colorDataRaw = (unsigned char*)colorImage.getRawImage();
337  float* depthDataRaw = (float*)depthImage.getRawImage();
338 
339  // This data should be got at runtime from device (cameraInfo topic or rpc)
340  double hfov = 58 * 3.14 / 360;
341 
342  double fl = ((double)this->width) / (2.0 *tan(hfov/2.0));
343 
344  double fovHorizontalDeg = 58.62;
345  double halfFovHorizontalRad = fovHorizontalDeg*M_PI/360.0;
346 // double fovVerticalDeg = 45.666;
347 // double halfFovVerticalRad = fovVerticalDeg*M_PI/360.0;
348 
349  double f=(width/2)/sin(halfFovHorizontalRad)*cos(halfFovHorizontalRad); // / sqrt(2);
350 
351  PC_Point point;
352  rosPC_data.data.resize(width*height*rosPC_data.point_step);
353 
354  PC_Point *iter = (PC_Point*) &rosPC_data.data[0];
355 
356  double u, v, depth;
357  // convert depth to point cloud
358  for (int32_t j = 0; j < height; j++)
359  {
360  for (int32_t i = 0; i < width; i++)
361  {
362  depth = depthDataRaw[index++] * scaleFactor;
363  u = -(i - 0.5*(width-1));
364  v = (0.5*(height-1) -j);
365  point.x = -(NetFloat32) depth * u/f;
366  point.y = -(NetFloat32) depth * v/f;
367  point.z = (NetFloat32) depth;
368  int new_i;
369 // if( depth > 0)
370 // new_i = (i + (int) ( (0.03 *f/depth) + 0.5) );
371 // else
372  new_i = i;
373 
374 // if( (new_i >= width) || (new_i < 0))
375 // {
376 // point.rgba[2] = 0;
377 // point.rgba[1] = 0;
378 // point.rgba[0] = 0;
379 // }
380 // else
381 // {
382  point.rgba[2] = (uint8_t) colorDataRaw[(j*width*3) + new_i*3 +0];
383  point.rgba[1] = (uint8_t) colorDataRaw[(j*width*3) + new_i*3 +1];
384  point.rgba[0] = (uint8_t) colorDataRaw[(j*width*3) + new_i*3 +2];
385 // }
386 
387  *iter = point;
388  iter++;
389  }
390  }
391 
392 
393  pointCloud_outTopic.write(rosPC_data);
394 
395  // TBD: for debugging purpose only
396  /*
397  if(publishTF)
398  {
399  tf::StampedTransform camera_base_tf(
400  tf::Transform(tf::createQuaternionFromRPY(rotation[0], rotation[1], rotation[2]),
401  tf::Vector3(translation[0], translation[1], translation[2])), ros::Time::now(),
402  rotation_frame_id.c_str(), frame_id);
403  tf_broadcaster->sendTransform(camera_base_tf);
404  }
405  */
406  return true;
407 }
408 
// Stop all communication: unblock any pending reads, then close every
// port and the ROS topic so the module can shut down cleanly.
bool RGBD2PointCloud::interruptModule()
{
    yDebug() << "Interrupting module";
    // Interrupt first so blocking read() calls in the update loop return.
    imageFrame_inputPort.interrupt();
    depthFrame_inputPort.interrupt();

    imageFrame_inputPort.close();
    depthFrame_inputPort.close();

    pointCloud_outTopic.close();
    rpcPort.close();
    return true;
}
422 
// RFModule shutdown hook: all teardown is delegated to interruptModule(),
// which interrupts and closes every port and the ROS topic.
bool RGBD2PointCloud::close()
{
    yTrace();
    interruptModule();
    return true;
}
429