// Constructor member-init fragment (the constructor signature is above this chunk):
// takes ownership of the pointcloud processing block `pc` and stores the output
// file name. NOTE(review): preceding lines are not visible here — confirm against full file.
46 _pc(std::move(pc)), fname(filename)
// Processing-block callback: obtain a point cloud from the incoming frame (or
// frameset), then export it to a PLY file and forward the original frame so the
// block behaves as a passthrough.
// NOTE(review): this extract is line-sampled — the declarations of the local
// depth/color frame holders, the frameset iteration header that defines `f`,
// and several closing braces are missing from this view; confirm in full file.
56 void func(frame data, frame_source& source)
// If the input is a frameset, pick out the most useful frames:
// an existing points frame wins; otherwise remember the first depth
// frame, and the first video frame to use as texture.
59 if (
auto fs = data.as<frameset>()) {
// `f` is presumably the frameset loop variable — loop header not visible; TODO confirm.
61 if (f.is<points>()) depth = f;
62 else if (!depth && f.is<depth_frame>()) depth = f;
63 else if (!color && f.is<video_frame>()) color = f;
65 }
// A bare depth frame / points frame is also accepted as input.
else if (data.is<depth_frame>() || data.is<points>()) {
// Without any depth-like input there is nothing to export.
69 if (!depth)
throw std::runtime_error(
"Need depth data to save PLY");
// If we only have a raw depth frame, run it through the pointcloud block
// (mapping texture coordinates to the color frame first, when available).
70 if (!depth.is<points>()) {
71 if (color) _pc.map_to(color);
72 depth = _pc.calculate(depth);
// Write the PLY file, then emit the untouched input downstream.
75 export_to_ply(depth, color);
76 source.frame_ready(data);
// Write the point cloud `p` (optionally textured by `color`) to `fname` as a PLY
// file. Supports two output flavors selected by member flags (presumably options
// of this processing block — confirm): `mesh` (also emit faces / normals),
// `use_texcoords` (emit per-vertex RGB), and binary vs. ascii encoding.
// NOTE(review): this extract is line-sampled — opening/closing braces and some
// conditional lines (e.g. the guards around texture lookup and the ascii/binary
// choice) are missing from this view; read alongside the full file.
79 void export_to_ply(points p, video_frame color) {
// Raw vertex / texture-coordinate arrays of the point cloud.
84 const auto verts = p.get_vertices();
85 const auto texcoords = p.get_texture_coordinates();
// Texture bytes stay null unless a color frame is available (guard line not visible here).
86 const uint8_t* texture_data =
nullptr;
88 texture_data =
reinterpret_cast<const uint8_t*
>(color.get_data());
// Compacted output: only "valid" vertices are kept; idx_map maps an index in
// the original (dense) vertex grid to its index in new_verts.
89 std::vector<rs2::vertex> new_verts;
90 std::vector<vec3d> normals;
91 std::vector<std::array<uint8_t, 3>> new_tex;
92 std::map<size_t, size_t> idx_map;
// Per-output-vertex list of adjacent face normals, averaged later.
93 std::map<size_t, std::vector<vec3d>> index_to_normals;
95 new_verts.reserve(p.size());
96 if (use_texcoords) new_tex.reserve(p.size());
// A vertex is considered valid when any coordinate is farther than this from 0.
98 static const auto min_distance = 1e-6;
// Pass 1: filter out near-origin (invalid) vertices; flip Y and Z so the PLY
// uses the conventional orientation, and sample the texture color per vertex.
100 for (
size_t i = 0; i < p.size(); ++i) {
101 if (fabs(verts[i].x) >= min_distance || fabs(verts[i].y) >= min_distance ||
102 fabs(verts[i].z) >= min_distance)
// NOTE(review): keys/values are narrowed through int() before being stored
// in a size_t map — harmless for typical frame sizes, but worth confirming.
104 idx_map[int(i)] = int(new_verts.size());
105 new_verts.push_back({ verts[i].x, -1 * verts[i].y, -1 * verts[i].z });
// Presumably guarded by use_texcoords (guard line not visible here — TODO confirm).
108 auto rgb = get_texcolor(color, texture_data, texcoords[i].u, texcoords[i].v);
109 new_tex.push_back(rgb);
// Pass 2 (mesh mode): triangulate each 2x2 cell of the organized vertex grid.
114 auto profile = p.get_profile().as<video_stream_profile>();
115 auto width = profile.width(), height = profile.height();
117 std::vector<std::array<size_t, 3>> faces;
120 for (
size_t x = 0; x < width - 1; ++x) {
121 for (
size_t y = 0; y < height - 1; ++y) {
// a-b
// | |   indices of the 2x2 cell's corners in the dense grid.
// c-d
122 auto a = y * width + x, b = y * width + x + 1, c = (y + 1)*width + x, d = (y + 1)*width + x + 1;
// Only emit the two triangles when all four corners have depth and the cell
// is "flat enough" (neighboring depths within `threshold`), to avoid
// stretching triangles across depth discontinuities.
123 if (verts[a].z && verts[b].z && verts[c].z && verts[d].z
124 && fabs(verts[a].z - verts[b].z) < threshold && fabs(verts[a].z - verts[c].z) < threshold
125 && fabs(verts[b].z - verts[d].z) < threshold && fabs(verts[c].z - verts[d].z) < threshold)
// Skip cells whose corners were filtered out in pass 1.
127 if (idx_map.count(a) == 0 || idx_map.count(b) == 0 || idx_map.count(c) == 0 ||
128 idx_map.count(d) == 0)
// Two triangles per cell; winding order chosen for the flipped Y/Z axes.
130 faces.push_back({ idx_map[a], idx_map[d], idx_map[b] });
131 faces.push_back({ idx_map[d], idx_map[a], idx_map[c] });
// Face normals, computed in the same flipped coordinate frame as new_verts.
135 vec3d point_a = { verts[a].x , -1 * verts[a].y, -1 * verts[a].z };
136 vec3d point_b = { verts[b].x , -1 * verts[b].y, -1 * verts[b].z };
137 vec3d point_c = { verts[c].x , -1 * verts[c].y, -1 * verts[c].z };
138 vec3d point_d = { verts[d].x , -1 * verts[d].y, -1 * verts[d].z };
140 auto n1 =
cross(point_d - point_a, point_b - point_a);
141 auto n2 =
cross(point_c - point_a, point_d - point_a);
// Accumulate each triangle's normal on all of its vertices; a and d belong
// to both triangles, b only to the first, c only to the second.
143 index_to_normals[idx_map[a]].push_back(n1);
144 index_to_normals[idx_map[a]].push_back(n2);
146 index_to_normals[idx_map[b]].push_back(n1);
148 index_to_normals[idx_map[c]].push_back(n2);
150 index_to_normals[idx_map[d]].push_back(n1);
151 index_to_normals[idx_map[d]].push_back(n2);
// Average (normalize the sum of) the accumulated normals per vertex; vertices
// with no adjacent face get a zero normal.
158 if (mesh && use_normals)
160 for (
size_t i = 0; i < new_verts.size(); ++i)
// NOTE(review): operator[] on index_to_normals copies the vector and inserts
// an empty entry for missing keys — presumably intentional fallback; confirm.
162 auto normals_vec = index_to_normals[i];
163 vec3d sum = { 0, 0, 0 };
164 for (
auto& n : normals_vec)
166 if (normals_vec.size() > 0)
167 normals.push_back((sum.normalize()));
169 normals.push_back({ 0, 0, 0 });
// --- PLY header (always written as text) ---
173 std::ofstream out(fname);
// Binary vs ascii format line — the selecting condition is not visible here.
176 out <<
"format binary_little_endian 1.0\n";
178 out <<
"format ascii 1.0\n";
179 out <<
"comment pointcloud saved from Realsense Viewer\n";
180 out <<
"element vertex " << new_verts.size() <<
"\n";
// sizeof(float)*8 renders e.g. "property float32 x".
181 out <<
"property float" <<
sizeof(float) * 8 <<
" x\n";
182 out <<
"property float" <<
sizeof(float) * 8 <<
" y\n";
183 out <<
"property float" <<
sizeof(float) * 8 <<
" z\n";
// Normal properties only in mesh mode with normals enabled.
184 if (mesh && use_normals)
186 out <<
"property float" <<
sizeof(float) * 8 <<
" nx\n";
187 out <<
"property float" <<
sizeof(float) * 8 <<
" ny\n";
188 out <<
"property float" <<
sizeof(float) * 8 <<
" nz\n";
// RGB properties — presumably guarded by use_texcoords (guard not visible).
192 out <<
"property uchar red\n";
193 out <<
"property uchar green\n";
194 out <<
"property uchar blue\n";
// Face element — presumably only in mesh mode (guard not visible).
198 out <<
"element face " << faces.size() <<
"\n";
199 out <<
"property list uchar int vertex_indices\n";
201 out <<
"end_header\n";
// --- Binary body: reopen in append+binary and dump raw little-endian data ---
// (the close() of the text stream before reopening is not visible here).
206 out.open(fname, std::ios_base::app | std::ios_base::binary);
207 for (
size_t i = 0; i < new_verts.size(); ++i)
210 out.write(
reinterpret_cast<const char*
>(&(new_verts[i].x)),
sizeof(
float));
211 out.write(
reinterpret_cast<const char*
>(&(new_verts[i].y)),
sizeof(
float));
212 out.write(
reinterpret_cast<const char*
>(&(new_verts[i].z)),
sizeof(
float));
214 if (mesh && use_normals)
216 out.write(
reinterpret_cast<const char*
>(&(normals[i].x)),
sizeof(
float));
217 out.write(
reinterpret_cast<const char*
>(&(normals[i].y)),
sizeof(
float));
218 out.write(
reinterpret_cast<const char*
>(&(normals[i].z)),
sizeof(
float));
223 out.write(
reinterpret_cast<const char*
>(&(new_tex[i][0])),
sizeof(uint8_t));
224 out.write(
reinterpret_cast<const char*
>(&(new_tex[i][1])),
sizeof(uint8_t));
225 out.write(
reinterpret_cast<const char*
>(&(new_tex[i][2])),
sizeof(uint8_t));
230 auto size = faces.size();
231 for (
size_t i = 0; i < size; ++i) {
// Each face record: a 1-byte vertex count (always 3) then three indices.
// NOTE(review): `three` is an int but only its first byte is written — correct
// only on little-endian hosts; likewise the size_t face indices below are
// truncated to sizeof(int) bytes. Endianness/width assumptions worth confirming.
232 static const int three = 3;
233 out.write(
reinterpret_cast<const char*
>(&three),
sizeof(uint8_t));
234 out.write(
reinterpret_cast<const char*
>(&(faces[i][0])),
sizeof(
int));
235 out.write(
reinterpret_cast<const char*
>(&(faces[i][1])),
sizeof(
int));
236 out.write(
reinterpret_cast<const char*
>(&(faces[i][2])),
sizeof(
int));
// --- Ascii body (alternative branch; the `else` line is not visible here) ---
242 for (
size_t i = 0; i <new_verts.size(); ++i)
244 out << new_verts[i].x <<
" ";
245 out << new_verts[i].y <<
" ";
246 out << new_verts[i].z <<
" ";
248 if (mesh && use_normals)
250 out << normals[i].x <<
" ";
251 out << normals[i].y <<
" ";
252 out << normals[i].z <<
" ";
// unsigned() keeps uint8_t from printing as a character.
257 out << unsigned(new_tex[i][0]) <<
" ";
258 out << unsigned(new_tex[i][1]) <<
" ";
259 out << unsigned(new_tex[i][2]) <<
" ";
266 auto size = faces.size();
267 for (
size_t i = 0; i < size; ++i) {
// std::get<N> on the std::array<size_t,3> face record.
270 out << std::get<0>(faces[i]) <<
" ";
271 out << std::get<1>(faces[i]) <<
" ";
272 out << std::get<2>(faces[i]) <<
" ";
279 std::array<uint8_t, 3> get_texcolor(
const video_frame& texture,
const uint8_t* texture_data,
float u,
float v)
281 const int w = texture.get_width(), h = texture.get_height();
282 int x = std::min(std::max(
int(u*w + .5f), 0), w - 1);
283 int y = std::min(std::max(
int(v*h + .5f), 0), h - 1);
284 int idx = x * texture.get_bytes_per_pixel() + y * texture.get_stride_in_bytes();
285 return { texture_data[idx], texture_data[idx + 1], texture_data[idx + 2] };
// Record the incoming frame(set) to a rosbag file named "<fname><frame#>.bag":
// builds a software device mirroring each frame's stream profile, registers
// cross-stream extrinsics, then replays the frames into a recorder.
// A non-frameset input is wrapped into a composite frame and re-entered
// recursively (with do_signal=false to avoid double-signaling).
// NOTE(review): this extract is line-sampled — the frameset loop body's `f`
// binding, several braces/guards, and the function's closing lines are not
// visible here; read alongside the full file.
299 void save(frame data, frame_source& source,
bool do_signal=
true)
// One (sensor, its software profile, frameset index) triple per input frame,
// plus profile pairs for extrinsics registration between streams.
303 std::vector<std::tuple<software_sensor, stream_profile, int>> sensors;
304 std::vector<std::tuple<stream_profile, stream_profile>> extrinsics;
306 if (
auto fs = data.as<frameset>()) {
307 for (
int i = 0; size_t(i) < fs.size(); ++i) {
// `f` is presumably fs[i] — the binding line is not visible here; TODO confirm.
309 auto profile = f.get_profile();
// One software sensor per frame, named "Sensor (<i>)".
310 std::stringstream sname;
311 sname <<
"Sensor (" << i <<
")";
312 auto s = dev.add_sensor(sname.str());
313 stream_profile software_profile;
// Mirror the stream kind: video (with depth special-casing), motion, or pose.
315 if (
auto vf = f.as<video_frame>()) {
316 auto vp = profile.as<video_stream_profile>();
317 rs2_video_stream stream{ vp.stream_type(), vp.stream_index(), i, vp.width(), vp.height(), vp.fps(), vf.get_bytes_per_pixel(), vp.format(), vp.get_intrinsics() };
318 software_profile = s.add_video_stream(stream);
// Depth-specific setup lives in the lines elided here (e.g. depth units).
319 if (f.is<rs2::depth_frame>()) {
323 }
else if (f.is<motion_frame>()) {
324 auto mp = profile.as<motion_stream_profile>();
325 rs2_motion_stream stream{ mp.stream_type(), mp.stream_index(), i, mp.fps(), mp.format(), mp.get_motion_intrinsics() };
326 software_profile = s.add_motion_stream(stream);
327 }
else if (f.is<pose_frame>()) {
328 rs2_pose_stream stream{ profile.stream_type(), profile.stream_index(), i, profile.fps(), profile.format() };
329 software_profile = s.add_pose_stream(stream);
334 sensors.emplace_back(s, software_profile, i);
// Register extrinsics between this new stream and every previously-seen one,
// using the original profiles' extrinsics as the source of truth.
336 bool found_extrin =
false;
337 for (
auto& root : extrinsics) {
339 std::get<0>(root).register_extrinsics_to(software_profile,
340 std::get<1>(root).get_extrinsics_to(profile)
// First stream (or no extrinsics found): remember this pair as a future root.
347 extrinsics.emplace_back(software_profile, profile);
// Open a recorder on "<fname><frame_number>.bag" wrapping the software device.
353 std::stringstream name;
354 name << fname << data.get_frame_number() <<
".bag";
355 recorder rec(name.str(), dev);
// Start every software sensor (callback discards frames) and push each
// original frame through it so the recorder captures it.
357 for (
auto group : sensors) {
358 auto s = std::get<0>(group);
359 auto profile = std::get<1>(group);
361 s.start([](frame) {});
362 frame f = fs[std::get<2>(group)];
363 if (
// The no-op deleters are safe because the source frame outlives this call.
auto vf = f.as<video_frame>()) {
364 s.on_video_frame({
const_cast<void*
>(vf.get_data()), [](
void*) {}, vf.get_stride_in_bytes(), vf.get_bytes_per_pixel(),
365 vf.get_timestamp(), vf.get_frame_timestamp_domain(),
static_cast<int>(vf.get_frame_number()), profile });
366 }
else if (f.is<motion_frame>()) {
367 s.on_motion_frame({
const_cast<void*
>(f.get_data()), [](
void*) {}, f.get_timestamp(),
368 f.get_frame_timestamp_domain(),
static_cast<int>(f.get_frame_number()), profile });
369 }
else if (f.is<pose_frame>()) {
370 s.on_pose_frame({
const_cast<void*
>(f.get_data()), [](
void*) {}, f.get_timestamp(),
371 f.get_frame_timestamp_domain(),
static_cast<int>(f.get_frame_number()), profile });
// Non-frameset input: wrap it and recurse, suppressing the downstream signal
// for the inner call; only the outer call forwards the original frame.
378 auto set = source.allocate_composite_frame({ data });
379 save(set, source,
false);
// Passthrough: emit the untouched input downstream (presumably gated on
// do_signal — the guard line is not visible here; TODO confirm).
383 source.frame_ready(data);