Intel® RealSense™ Cross Platform API
rs_export.hpp
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2017 Intel Corporation. All Rights Reserved.

#ifndef LIBREALSENSE_RS2_EXPORT_HPP
#define LIBREALSENSE_RS2_EXPORT_HPP

#include <map>
#include <fstream>
#include <cmath>
#include <sstream>
#include <cassert>
#include "rs_processing.hpp"
#include "rs_internal.hpp"
#include <iostream>
#include <thread>
#include <chrono>
namespace rs2
{
    struct vec3d {
        float x, y, z;
        float length() const { return sqrt(x * x + y * y + z * z); }

        vec3d normalize() const
        {
            auto len = length();
            return { x / len, y / len, z / len };
        }
    };

    inline vec3d operator + (const vec3d & a, const vec3d & b) { return{ a.x + b.x, a.y + b.y, a.z + b.z }; }
    inline vec3d operator - (const vec3d & a, const vec3d & b) { return{ a.x - b.x, a.y - b.y, a.z - b.z }; }
    inline vec3d cross(const vec3d & a, const vec3d & b) { return { a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y - a.y * b.x }; }

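    // save_to_ply is a passthrough processing block: every frame (or frameset) pushed
    // into it is converted to a point cloud if needed, written to a PLY file at the
    // path given in the constructor, and then forwarded unchanged. Output format,
    // meshing, normals and texture handling are controlled through the custom options
    // declared below.
    //
    // Illustrative usage sketch (not part of this header; assumes a started
    // rs2::pipeline named `pipe` and a hypothetical output path "out.ply"):
    //
    //   rs2::save_to_ply exporter("out.ply");
    //   exporter.set_option(rs2::save_to_ply::OPTION_PLY_BINARY, 0.f); // ASCII PLY
    //   exporter.process(pipe.wait_for_frames());                      // writes out.ply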
    class save_to_ply : public filter
    {
    public:
        static const auto OPTION_IGNORE_COLOR = rs2_option(RS2_OPTION_COUNT + 10);
        static const auto OPTION_PLY_MESH = rs2_option(RS2_OPTION_COUNT + 11);
        static const auto OPTION_PLY_BINARY = rs2_option(RS2_OPTION_COUNT + 12);
        static const auto OPTION_PLY_NORMALS = rs2_option(RS2_OPTION_COUNT + 13);
        static const auto OPTION_PLY_THRESHOLD = rs2_option(RS2_OPTION_COUNT + 14);

        save_to_ply(std::string filename = "RealSense Pointcloud ", pointcloud pc = pointcloud())
            : filter([this](frame f, frame_source& s) { func(f, s); }),
              _pc(std::move(pc)), fname(filename)
        {
            register_simple_option(OPTION_IGNORE_COLOR, option_range{ 0, 1, 0, 1 });
            register_simple_option(OPTION_PLY_MESH, option_range{ 0, 1, 1, 1 });
            register_simple_option(OPTION_PLY_NORMALS, option_range{ 0, 1, 0, 1 });
            register_simple_option(OPTION_PLY_BINARY, option_range{ 0, 1, 1, 1 });
            register_simple_option(OPTION_PLY_THRESHOLD, option_range{ 0, 1, 0.05f, 0 });
        }

    private:
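        // Entry point of the filter: picks the points frame (or the first depth
        // frame) and, if present, the first other video frame to use as texture,
        // deprojects depth into a point cloud when needed (via rs2::pointcloud),
        // exports the result and forwards the original input downstream.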
        void func(frame data, frame_source& source)
        {
            frame depth, color;
            if (auto fs = data.as<frameset>()) {
                for (auto f : fs) {
                    if (f.is<points>()) depth = f;
                    else if (!depth && f.is<depth_frame>()) depth = f;
                    else if (!color && f.is<video_frame>()) color = f;
                }
            } else if (data.is<depth_frame>() || data.is<points>()) {
                depth = data;
            }

            if (!depth) throw std::runtime_error("Need depth data to save PLY");
            if (!depth.is<points>()) {
                if (color) _pc.map_to(color);
                depth = _pc.calculate(depth);
            }

            export_to_ply(depth, color);
            source.frame_ready(data); // passthrough filter because processing_block::process doesn't support sinks
        }

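        // Writes the filtered point cloud to `fname` as a PLY file: an ASCII header
        // followed by either binary little-endian or ASCII vertex data, optional
        // per-vertex normals and RGB color, and (when meshing is enabled) a
        // triangle face list.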
        void export_to_ply(points p, video_frame color) {
            const bool use_texcoords = color && !get_option(OPTION_IGNORE_COLOR);
            bool mesh = get_option(OPTION_PLY_MESH) != 0;
            bool binary = get_option(OPTION_PLY_BINARY) != 0;
            bool use_normals = get_option(OPTION_PLY_NORMALS) != 0;
            const auto verts = p.get_vertices();
            const auto texcoords = p.get_texture_coordinates();
            const uint8_t* texture_data = nullptr;
            if (use_texcoords) // texture might be on the gpu, get pointer to data before for-loop to avoid repeated access
                texture_data = reinterpret_cast<const uint8_t*>(color.get_data());
            std::vector<rs2::vertex> new_verts;
            std::vector<vec3d> normals;
            std::vector<std::array<uint8_t, 3>> new_tex;
            std::map<size_t, size_t> idx_map;
            std::map<size_t, std::vector<vec3d>> index_to_normals;

            new_verts.reserve(p.size());
            if (use_texcoords) new_tex.reserve(p.size());

            static const auto min_distance = 1e-6;

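            // Keep only vertices with a meaningful position; pixels with no depth
            // project to (0, 0, 0) and are dropped. idx_map translates an index in
            // the original (full) vertex grid to an index in the compacted list.
            // Y and Z are negated to convert from the SDK's camera convention
            // (Y down, Z forward) into a Y-up orientation for the exported file.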
            for (size_t i = 0; i < p.size(); ++i) {
                if (fabs(verts[i].x) >= min_distance || fabs(verts[i].y) >= min_distance ||
                    fabs(verts[i].z) >= min_distance)
                {
                    idx_map[int(i)] = int(new_verts.size());
                    new_verts.push_back({ verts[i].x, -1 * verts[i].y, -1 * verts[i].z });
                    if (use_texcoords)
                    {
                        auto rgb = get_texcolor(color, texture_data, texcoords[i].u, texcoords[i].v);
                        new_tex.push_back(rgb);
                    }
                }
            }

            auto profile = p.get_profile().as<video_stream_profile>();
            auto width = profile.width(), height = profile.height();
            const auto threshold = get_option(OPTION_PLY_THRESHOLD);
            std::vector<std::array<size_t, 3>> faces;
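            // Mesh generation: walk the depth image as a grid and, for every 2x2
            // block of pixels (a b / c d) whose four vertices all have depth and
            // whose adjacent depth differences stay under OPTION_PLY_THRESHOLD,
            // emit the two triangles (a, d, b) and (d, a, c). Face normals are
            // accumulated per vertex so they can be averaged afterwards.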
            if (mesh)
            {
                for (size_t x = 0; x < width - 1; ++x) {
                    for (size_t y = 0; y < height - 1; ++y) {
                        auto a = y * width + x, b = y * width + x + 1, c = (y + 1) * width + x, d = (y + 1) * width + x + 1;
                        if (verts[a].z && verts[b].z && verts[c].z && verts[d].z
                            && fabs(verts[a].z - verts[b].z) < threshold && fabs(verts[a].z - verts[c].z) < threshold
                            && fabs(verts[b].z - verts[d].z) < threshold && fabs(verts[c].z - verts[d].z) < threshold)
                        {
                            if (idx_map.count(a) == 0 || idx_map.count(b) == 0 || idx_map.count(c) == 0 ||
                                idx_map.count(d) == 0)
                                continue;
                            faces.push_back({ idx_map[a], idx_map[d], idx_map[b] });
                            faces.push_back({ idx_map[d], idx_map[a], idx_map[c] });

                            if (use_normals)
                            {
                                vec3d point_a = { verts[a].x, -1 * verts[a].y, -1 * verts[a].z };
                                vec3d point_b = { verts[b].x, -1 * verts[b].y, -1 * verts[b].z };
                                vec3d point_c = { verts[c].x, -1 * verts[c].y, -1 * verts[c].z };
                                vec3d point_d = { verts[d].x, -1 * verts[d].y, -1 * verts[d].z };

                                auto n1 = cross(point_d - point_a, point_b - point_a);
                                auto n2 = cross(point_c - point_a, point_d - point_a);

                                index_to_normals[idx_map[a]].push_back(n1);
                                index_to_normals[idx_map[a]].push_back(n2);

                                index_to_normals[idx_map[b]].push_back(n1);

                                index_to_normals[idx_map[c]].push_back(n2);

                                index_to_normals[idx_map[d]].push_back(n1);
                                index_to_normals[idx_map[d]].push_back(n2);
                            }
                        }
                    }
                }
            }

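            // Average the accumulated face normals of every kept vertex into a single
            // unit normal; vertices that ended up in no face get a zero normal.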
            if (mesh && use_normals)
            {
                for (size_t i = 0; i < new_verts.size(); ++i)
                {
                    auto normals_vec = index_to_normals[i];
                    vec3d sum = { 0, 0, 0 };
                    for (auto& n : normals_vec)
                        sum = sum + n;
                    if (normals_vec.size() > 0)
                        normals.push_back((sum.normalize()));
                    else
                        normals.push_back({ 0, 0, 0 });
                }
            }

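            // Write the PLY header. The header itself is always ASCII; the `format`
            // line declares whether the body that follows is ASCII or binary
            // little-endian.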
            std::ofstream out(fname);
            out << "ply\n";
            if (binary)
                out << "format binary_little_endian 1.0\n";
            else
                out << "format ascii 1.0\n";
            out << "comment pointcloud saved from Realsense Viewer\n";
            out << "element vertex " << new_verts.size() << "\n";
            out << "property float" << sizeof(float) * 8 << " x\n";
            out << "property float" << sizeof(float) * 8 << " y\n";
            out << "property float" << sizeof(float) * 8 << " z\n";
            if (mesh && use_normals)
            {
                out << "property float" << sizeof(float) * 8 << " nx\n";
                out << "property float" << sizeof(float) * 8 << " ny\n";
                out << "property float" << sizeof(float) * 8 << " nz\n";
            }
            if (use_texcoords)
            {
                out << "property uchar red\n";
                out << "property uchar green\n";
                out << "property uchar blue\n";
            }
            if (mesh)
            {
                out << "element face " << faces.size() << "\n";
                out << "property list uchar int vertex_indices\n";
            }
            out << "end_header\n";

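            // Body: for binary output the file is reopened in binary append mode and
            // the raw floats / bytes are written directly; otherwise the same data is
            // streamed out as whitespace-separated text.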
            if (binary)
            {
                out.close();
                out.open(fname, std::ios_base::app | std::ios_base::binary);
                for (size_t i = 0; i < new_verts.size(); ++i)
                {
                    // we assume a little-endian architecture on your device
                    out.write(reinterpret_cast<const char*>(&(new_verts[i].x)), sizeof(float));
                    out.write(reinterpret_cast<const char*>(&(new_verts[i].y)), sizeof(float));
                    out.write(reinterpret_cast<const char*>(&(new_verts[i].z)), sizeof(float));

                    if (mesh && use_normals)
                    {
                        out.write(reinterpret_cast<const char*>(&(normals[i].x)), sizeof(float));
                        out.write(reinterpret_cast<const char*>(&(normals[i].y)), sizeof(float));
                        out.write(reinterpret_cast<const char*>(&(normals[i].z)), sizeof(float));
                    }

                    if (use_texcoords)
                    {
                        out.write(reinterpret_cast<const char*>(&(new_tex[i][0])), sizeof(uint8_t));
                        out.write(reinterpret_cast<const char*>(&(new_tex[i][1])), sizeof(uint8_t));
                        out.write(reinterpret_cast<const char*>(&(new_tex[i][2])), sizeof(uint8_t));
                    }
                }
                if (mesh)
                {
                    auto size = faces.size();
                    for (size_t i = 0; i < size; ++i) {
                        static const int three = 3;
                        out.write(reinterpret_cast<const char*>(&three), sizeof(uint8_t));
                        out.write(reinterpret_cast<const char*>(&(faces[i][0])), sizeof(int));
                        out.write(reinterpret_cast<const char*>(&(faces[i][1])), sizeof(int));
                        out.write(reinterpret_cast<const char*>(&(faces[i][2])), sizeof(int));
                    }
                }
            }
            else
            {
                for (size_t i = 0; i < new_verts.size(); ++i)
                {
                    out << new_verts[i].x << " ";
                    out << new_verts[i].y << " ";
                    out << new_verts[i].z << " ";
                    out << "\n";

                    if (mesh && use_normals)
                    {
                        out << normals[i].x << " ";
                        out << normals[i].y << " ";
                        out << normals[i].z << " ";
                        out << "\n";
                    }

                    if (use_texcoords)
                    {
                        out << unsigned(new_tex[i][0]) << " ";
                        out << unsigned(new_tex[i][1]) << " ";
                        out << unsigned(new_tex[i][2]) << " ";
                        out << "\n";
                    }
                }
                if (mesh)
                {
                    auto size = faces.size();
                    for (size_t i = 0; i < size; ++i) {
                        int three = 3;
                        out << three << " ";
                        out << std::get<0>(faces[i]) << " ";
                        out << std::get<1>(faces[i]) << " ";
                        out << std::get<2>(faces[i]) << " ";
                        out << "\n";
                    }
                }
            }
        }

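        // Nearest-pixel lookup of the RGB value at normalized texture coordinates
        // (u, v); the coordinates are clamped to the frame bounds before sampling.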
        std::array<uint8_t, 3> get_texcolor(const video_frame& texture, const uint8_t* texture_data, float u, float v)
        {
            const int w = texture.get_width(), h = texture.get_height();
            int x = std::min(std::max(int(u * w + .5f), 0), w - 1);
            int y = std::min(std::max(int(v * h + .5f), 0), h - 1);
            int idx = x * texture.get_bytes_per_pixel() + y * texture.get_stride_in_bytes();
            return { texture_data[idx], texture_data[idx + 1], texture_data[idx + 2] };
        }

        std::string fname;
        pointcloud _pc;
    };

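    // save_single_frameset is a passthrough processing block that records one
    // frameset to a rosbag file ("<filename><frame number>.bag") by rebuilding a
    // software_device with one software_sensor per frame, then forwards the input.
    // A single (non-composite) frame is wrapped into a frameset and handled the
    // same way.
    //
    // Illustrative usage sketch (not part of this header; assumes a started
    // rs2::pipeline named `pipe`):
    //
    //   rs2::save_single_frameset snapshot("RealSense Frameset ");
    //   snapshot.process(pipe.wait_for_frames()); // writes e.g. "RealSense Frameset 42.bag"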
    class save_single_frameset : public filter {
    public:
        save_single_frameset(std::string filename = "RealSense Frameset ")
            : filter([this](frame f, frame_source& s) { save(f, s); }), fname(filename)
        {}

    private:
        void save(frame data, frame_source& source, bool do_signal = true)
        {
            software_device dev;

            std::vector<std::tuple<software_sensor, stream_profile, int>> sensors;
            std::vector<std::tuple<stream_profile, stream_profile>> extrinsics;

            if (auto fs = data.as<frameset>()) {
                for (int i = 0; size_t(i) < fs.size(); ++i) {
                    frame f = fs[i];
                    auto profile = f.get_profile();
                    std::stringstream sname;
                    sname << "Sensor (" << i << ")";
                    auto s = dev.add_sensor(sname.str());
                    stream_profile software_profile;

                    if (auto vf = f.as<video_frame>()) {
                        auto vp = profile.as<video_stream_profile>();
                        rs2_video_stream stream{ vp.stream_type(), vp.stream_index(), i, vp.width(), vp.height(), vp.fps(), vf.get_bytes_per_pixel(), vp.format(), vp.get_intrinsics() };
                        software_profile = s.add_video_stream(stream);
                        if (f.is<rs2::depth_frame>()) {
                            auto ds = sensor_from_frame(f)->as<rs2::depth_sensor>();
                            s.add_read_only_option(RS2_OPTION_DEPTH_UNITS, ds.get_option(RS2_OPTION_DEPTH_UNITS));
                        }
                    } else if (f.is<motion_frame>()) {
                        auto mp = profile.as<motion_stream_profile>();
                        rs2_motion_stream stream{ mp.stream_type(), mp.stream_index(), i, mp.fps(), mp.format(), mp.get_motion_intrinsics() };
                        software_profile = s.add_motion_stream(stream);
                    } else if (f.is<pose_frame>()) {
                        rs2_pose_stream stream{ profile.stream_type(), profile.stream_index(), i, profile.fps(), profile.format() };
                        software_profile = s.add_pose_stream(stream);
                    } else {
                        // TODO: How to handle other frame types? (e.g. points)
                        assert(false);
                    }
                    sensors.emplace_back(s, software_profile, i);

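                    // Register extrinsics for the new software profile: try to relate
                    // it to one of the profiles recorded so far via the original
                    // streams' extrinsics; if none of them is connected to this
                    // stream, it becomes a new root for later profiles.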
                    bool found_extrin = false;
                    for (auto& root : extrinsics) {
                        try {
                            std::get<0>(root).register_extrinsics_to(software_profile,
                                std::get<1>(root).get_extrinsics_to(profile)
                            );
                            found_extrin = true;
                            break;
                        } catch (...) {}
                    }
                    if (!found_extrin) {
                        extrinsics.emplace_back(software_profile, profile);
                    }
                }

                // Recorder needs sensors to already exist when it's created
                std::stringstream name;
                name << fname << data.get_frame_number() << ".bag";
                recorder rec(name.str(), dev);

                for (auto group : sensors) {
                    auto s = std::get<0>(group);
                    auto profile = std::get<1>(group);
                    s.open(profile);
                    s.start([](frame) {});
                    frame f = fs[std::get<2>(group)];
                    if (auto vf = f.as<video_frame>()) {
                        s.on_video_frame({ const_cast<void*>(vf.get_data()), [](void*) {}, vf.get_stride_in_bytes(), vf.get_bytes_per_pixel(),
                                           vf.get_timestamp(), vf.get_frame_timestamp_domain(), static_cast<int>(vf.get_frame_number()), profile });
                    } else if (f.is<motion_frame>()) {
                        s.on_motion_frame({ const_cast<void*>(f.get_data()), [](void*) {}, f.get_timestamp(),
                                            f.get_frame_timestamp_domain(), static_cast<int>(f.get_frame_number()), profile });
                    } else if (f.is<pose_frame>()) {
                        s.on_pose_frame({ const_cast<void*>(f.get_data()), [](void*) {}, f.get_timestamp(),
                                          f.get_frame_timestamp_domain(), static_cast<int>(f.get_frame_number()), profile });
                    }
                    s.stop();
                    s.close();
                }
            } else {
                // single frame
                auto set = source.allocate_composite_frame({ data });
                save(set, source, false);
            }

            if (do_signal)
                source.frame_ready(data);
        }

        std::string fname;
    };
}

#endif