/*
* OpenVINS: An Open Platform for Visual-Inertial Research
* Copyright (C) 2018-2023 Patrick Geneva
* Copyright (C) 2018-2023 Guoquan Huang
* Copyright (C) 2018-2023 OpenVINS Contributors
* Copyright (C) 2018-2019 Kevin Eckenhoff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
#include "ROSVisualizerHelper.h"
#include "core/VioManager.h"
#include "sim/Simulator.h"
#include "state/State.h"
#include "state/StateHelper.h"
#include "types/PoseJPL.h"
using namespace ov_msckf;
using namespace std;
#if ROS_AVAILABLE == 1
sensor_msgs::PointCloud2 ROSVisualizerHelper::get_ros_pointcloud(const std::vector<Eigen::Vector3d> &feats) {

  // Declare message and sizes
  sensor_msgs::PointCloud2 cloud;
  cloud.header.frame_id = "global";
  cloud.header.stamp = ros::Time::now();
  cloud.width = 3 * feats.size();
  cloud.height = 1;
  cloud.is_bigendian = false;
  cloud.is_dense = false; // there may be invalid points

  // Setup pointcloud fields
  sensor_msgs::PointCloud2Modifier modifier(cloud);
  modifier.setPointCloud2FieldsByString(1, "xyz");
  modifier.resize(3 * feats.size());

  // Iterators
  sensor_msgs::PointCloud2Iterator<float> out_x(cloud, "x");
  sensor_msgs::PointCloud2Iterator<float> out_y(cloud, "y");
  sensor_msgs::PointCloud2Iterator<float> out_z(cloud, "z");

  // Fill our iterators
  for (const auto &pt : feats) {
    *out_x = (float)pt(0);
    ++out_x;
    *out_y = (float)pt(1);
    ++out_y;
    *out_z = (float)pt(2);
    ++out_z;
  }

  return cloud;
}
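
/*
 * Example usage (illustrative sketch only, not part of the upstream helper):
 * publishing the returned cloud from a ROS1 node. The topic name and the
 * source of "feats" are placeholders for whatever your node actually provides.
 *
 *   ros::Publisher pub_points = nh.advertise<sensor_msgs::PointCloud2>("/ov_msckf/points_slam", 2);
 *   std::vector<Eigen::Vector3d> feats = ...; // e.g. features taken from your estimator
 *   pub_points.publish(ROSVisualizerHelper::get_ros_pointcloud(feats));
 */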
tf::StampedTransform ROSVisualizerHelper::get_stamped_transform_from_pose(const std::shared_ptr<ov_type::PoseJPL> &pose, bool flip_trans) {

  // Need to flip the transform to the IMU frame
  Eigen::Vector4d q_ItoC = pose->quat();
  Eigen::Vector3d p_CinI = pose->pos();
  if (flip_trans) {
    p_CinI = -pose->Rot().transpose() * pose->pos();
  }

  // publish our transform on TF
  // NOTE: since we use JPL we have an implicit conversion to Hamilton when we publish
  // NOTE: a rotation from ItoC in JPL has the same xyzw as a CtoI Hamilton rotation
  tf::StampedTransform trans;
  trans.stamp_ = ros::Time::now();
  tf::Quaternion quat(q_ItoC(0), q_ItoC(1), q_ItoC(2), q_ItoC(3));
  trans.setRotation(quat);
  tf::Vector3 orig(p_CinI(0), p_CinI(1), p_CinI(2));
  trans.setOrigin(orig);
  return trans;
}
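
/*
 * Example usage (illustrative sketch, assumptions noted): broadcasting the
 * returned transform over TF. The frame names and the calibration pose
 * variable "calib_pose" are placeholders; the actual publisher fills these
 * from the estimator state.
 *
 *   static tf::TransformBroadcaster br;
 *   tf::StampedTransform trans = ROSVisualizerHelper::get_stamped_transform_from_pose(calib_pose, true);
 *   trans.frame_id_ = "imu";
 *   trans.child_frame_id_ = "cam0";
 *   br.sendTransform(trans);
 */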
#endif
#if ROS_AVAILABLE == 2
sensor_msgs::msg::PointCloud2 ROSVisualizerHelper::get_ros_pointcloud(std::shared_ptr<rclcpp::Node> node,
                                                                      const std::vector<Eigen::Vector3d> &feats) {

  // Declare message and sizes
  sensor_msgs::msg::PointCloud2 cloud;
  cloud.header.frame_id = "global";
  cloud.header.stamp = node->now();
  cloud.width = 3 * feats.size();
  cloud.height = 1;
  cloud.is_bigendian = false;
  cloud.is_dense = false; // there may be invalid points

  // Setup pointcloud fields
  sensor_msgs::PointCloud2Modifier modifier(cloud);
  modifier.setPointCloud2FieldsByString(1, "xyz");
  modifier.resize(3 * feats.size());

  // Iterators
  sensor_msgs::PointCloud2Iterator<float> out_x(cloud, "x");
  sensor_msgs::PointCloud2Iterator<float> out_y(cloud, "y");
  sensor_msgs::PointCloud2Iterator<float> out_z(cloud, "z");

  // Fill our iterators
  for (const auto &pt : feats) {
    *out_x = (float)pt(0);
    ++out_x;
    *out_y = (float)pt(1);
    ++out_y;
    *out_z = (float)pt(2);
    ++out_z;
  }

  return cloud;
}
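
/*
 * Example usage (illustrative sketch, assumptions noted): publishing the
 * returned cloud from a ROS2 node. The topic name and the source of "feats"
 * are placeholders.
 *
 *   auto pub_points = node->create_publisher<sensor_msgs::msg::PointCloud2>("/ov_msckf/points_slam", 2);
 *   std::vector<Eigen::Vector3d> feats = ...; // e.g. features taken from your estimator
 *   pub_points->publish(ROSVisualizerHelper::get_ros_pointcloud(node, feats));
 */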
geometry_msgs::msg::TransformStamped ROSVisualizerHelper::get_stamped_transform_from_pose(std::shared_ptr<rclcpp::Node> node,
                                                                                          const std::shared_ptr<ov_type::PoseJPL> &pose,
                                                                                          bool flip_trans) {

  // Need to flip the transform to the IMU frame
  Eigen::Vector4d q_ItoC = pose->quat();
  Eigen::Vector3d p_CinI = pose->pos();
  if (flip_trans) {
    p_CinI = -pose->Rot().transpose() * pose->pos();
  }

  // publish our transform on TF
  // NOTE: since we use JPL we have an implicit conversion to Hamilton when we publish
  // NOTE: a rotation from ItoC in JPL has the same xyzw as a CtoI Hamilton rotation
  geometry_msgs::msg::TransformStamped trans;
  trans.header.stamp = node->now();
  trans.transform.rotation.x = q_ItoC(0);
  trans.transform.rotation.y = q_ItoC(1);
  trans.transform.rotation.z = q_ItoC(2);
  trans.transform.rotation.w = q_ItoC(3);
  trans.transform.translation.x = p_CinI(0);
  trans.transform.translation.y = p_CinI(1);
  trans.transform.translation.z = p_CinI(2);
  return trans;
}
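
/*
 * Example usage (illustrative sketch, assumptions noted): broadcasting the
 * returned transform with tf2 in ROS2. The broadcaster, frame names, and
 * "calib_pose" are placeholders for whatever your visualizer actually holds.
 *
 *   auto br = std::make_shared<tf2_ros::TransformBroadcaster>(node);
 *   geometry_msgs::msg::TransformStamped trans = ROSVisualizerHelper::get_stamped_transform_from_pose(node, calib_pose, true);
 *   trans.header.frame_id = "imu";
 *   trans.child_frame_id = "cam0";
 *   br->sendTransform(trans);
 */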
#endif
void ROSVisualizerHelper::sim_save_total_state_to_file(std::shared_ptr<State> state, std::shared_ptr<Simulator> sim,
                                                       std::ofstream &of_state_est, std::ofstream &of_state_std,
                                                       std::ofstream &of_state_gt) {

  // We want to publish in the IMU clock frame
  // The timestamp in the state will be the last camera time
  double t_ItoC = state->_calib_dt_CAMtoIMU->value()(0);
  double timestamp_inI = state->_timestamp + t_ItoC;

  // If we have our simulator, then save it to our groundtruth file
  if (sim != nullptr) {

    // Note that we get the true time in the IMU clock frame
    // NOTE: we record both the estimate and groundtruth with the same "true" timestamp if we are doing simulation
    Eigen::Matrix<double, 17, 1> state_gt;
    timestamp_inI = state->_timestamp + sim->get_true_parameters().calib_camimu_dt;
    if (sim->get_state(timestamp_inI, state_gt)) {
      // STATE: write current true state
      of_state_gt.precision(5);
      of_state_gt.setf(std::ios::fixed, std::ios::floatfield);
      of_state_gt << state_gt(0) << " ";
      of_state_gt.precision(6);
      of_state_gt << state_gt(1) << " " << state_gt(2) << " " << state_gt(3) << " " << state_gt(4) << " ";
      of_state_gt << state_gt(5) << " " << state_gt(6) << " " << state_gt(7) << " ";
      of_state_gt << state_gt(8) << " " << state_gt(9) << " " << state_gt(10) << " ";
      of_state_gt << state_gt(11) << " " << state_gt(12) << " " << state_gt(13) << " ";
      of_state_gt << state_gt(14) << " " << state_gt(15) << " " << state_gt(16) << " ";

      // TIMEOFF: Get the current true time offset
      of_state_gt.precision(7);
      of_state_gt << sim->get_true_parameters().calib_camimu_dt << " ";
      of_state_gt.precision(0);
      of_state_gt << state->_options.num_cameras << " ";
      of_state_gt.precision(6);

      // CALIBRATION: Write the camera values to file
      assert(state->_options.num_cameras == sim->get_true_parameters().state_options.num_cameras);
      for (int i = 0; i < state->_options.num_cameras; i++) {
        // Intrinsics values
        of_state_gt << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(0) << " "
                    << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(1) << " "
                    << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(2) << " "
                    << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(3) << " ";
        of_state_gt << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(4) << " "
                    << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(5) << " "
                    << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(6) << " "
                    << sim->get_true_parameters().camera_intrinsics.at(i)->get_value()(7) << " ";
        // Rotation and position
        of_state_gt << sim->get_true_parameters().camera_extrinsics.at(i)(0) << " " << sim->get_true_parameters().camera_extrinsics.at(i)(1)
                    << " " << sim->get_true_parameters().camera_extrinsics.at(i)(2) << " "
                    << sim->get_true_parameters().camera_extrinsics.at(i)(3) << " ";
        of_state_gt << sim->get_true_parameters().camera_extrinsics.at(i)(4) << " " << sim->get_true_parameters().camera_extrinsics.at(i)(5)
                    << " " << sim->get_true_parameters().camera_extrinsics.at(i)(6) << " ";
      }

      // New line
      of_state_gt << endl;
    }
  }

  //==========================================================================
  //==========================================================================
  //==========================================================================

  // Get the covariance of the whole system
  Eigen::MatrixXd cov = StateHelper::get_full_covariance(state);

  // STATE: Write the current state to file
  of_state_est.precision(5);
  of_state_est.setf(std::ios::fixed, std::ios::floatfield);
  of_state_est << timestamp_inI << " ";
  of_state_est.precision(6);
  of_state_est << state->_imu->quat()(0) << " " << state->_imu->quat()(1) << " " << state->_imu->quat()(2) << " " << state->_imu->quat()(3)
               << " ";
  of_state_est << state->_imu->pos()(0) << " " << state->_imu->pos()(1) << " " << state->_imu->pos()(2) << " ";
  of_state_est << state->_imu->vel()(0) << " " << state->_imu->vel()(1) << " " << state->_imu->vel()(2) << " ";
  of_state_est << state->_imu->bias_g()(0) << " " << state->_imu->bias_g()(1) << " " << state->_imu->bias_g()(2) << " ";
  of_state_est << state->_imu->bias_a()(0) << " " << state->_imu->bias_a()(1) << " " << state->_imu->bias_a()(2) << " ";

  // STATE: Write current uncertainty to file
  of_state_std.precision(5);
  of_state_std.setf(std::ios::fixed, std::ios::floatfield);
  of_state_std << timestamp_inI << " ";
  of_state_std.precision(6);
  int id = state->_imu->q()->id();
  of_state_std << std::sqrt(cov(id + 0, id + 0)) << " " << std::sqrt(cov(id + 1, id + 1)) << " " << std::sqrt(cov(id + 2, id + 2)) << " ";
  id = state->_imu->p()->id();
  of_state_std << std::sqrt(cov(id + 0, id + 0)) << " " << std::sqrt(cov(id + 1, id + 1)) << " " << std::sqrt(cov(id + 2, id + 2)) << " ";
  id = state->_imu->v()->id();
  of_state_std << std::sqrt(cov(id + 0, id + 0)) << " " << std::sqrt(cov(id + 1, id + 1)) << " " << std::sqrt(cov(id + 2, id + 2)) << " ";
  id = state->_imu->bg()->id();
  of_state_std << std::sqrt(cov(id + 0, id + 0)) << " " << std::sqrt(cov(id + 1, id + 1)) << " " << std::sqrt(cov(id + 2, id + 2)) << " ";
  id = state->_imu->ba()->id();
  of_state_std << std::sqrt(cov(id + 0, id + 0)) << " " << std::sqrt(cov(id + 1, id + 1)) << " " << std::sqrt(cov(id + 2, id + 2)) << " ";

  // TIMEOFF: Get the current estimate time offset
  of_state_est.precision(7);
  of_state_est << state->_calib_dt_CAMtoIMU->value()(0) << " ";
  of_state_est.precision(0);
  of_state_est << state->_options.num_cameras << " ";
  of_state_est.precision(6);

  // TIMEOFF: Get the current std values
  if (state->_options.do_calib_camera_timeoffset) {
    of_state_std << std::sqrt(cov(state->_calib_dt_CAMtoIMU->id(), state->_calib_dt_CAMtoIMU->id())) << " ";
  } else {
    of_state_std << 0.0 << " ";
  }
  of_state_std.precision(0);
  of_state_std << state->_options.num_cameras << " ";
  of_state_std.precision(6);

  // CALIBRATION: Write the camera values to file
  for (int i = 0; i < state->_options.num_cameras; i++) {
    // Intrinsics values
    of_state_est << state->_cam_intrinsics.at(i)->value()(0) << " " << state->_cam_intrinsics.at(i)->value()(1) << " "
                 << state->_cam_intrinsics.at(i)->value()(2) << " " << state->_cam_intrinsics.at(i)->value()(3) << " ";
    of_state_est << state->_cam_intrinsics.at(i)->value()(4) << " " << state->_cam_intrinsics.at(i)->value()(5) << " "
                 << state->_cam_intrinsics.at(i)->value()(6) << " " << state->_cam_intrinsics.at(i)->value()(7) << " ";
    // Rotation and position
    of_state_est << state->_calib_IMUtoCAM.at(i)->value()(0) << " " << state->_calib_IMUtoCAM.at(i)->value()(1) << " "
                 << state->_calib_IMUtoCAM.at(i)->value()(2) << " " << state->_calib_IMUtoCAM.at(i)->value()(3) << " ";
    of_state_est << state->_calib_IMUtoCAM.at(i)->value()(4) << " " << state->_calib_IMUtoCAM.at(i)->value()(5) << " "
                 << state->_calib_IMUtoCAM.at(i)->value()(6) << " ";
    // Covariance
    if (state->_options.do_calib_camera_intrinsics) {
      int index_in = state->_cam_intrinsics.at(i)->id();
      of_state_std << std::sqrt(cov(index_in + 0, index_in + 0)) << " " << std::sqrt(cov(index_in + 1, index_in + 1)) << " "
                   << std::sqrt(cov(index_in + 2, index_in + 2)) << " " << std::sqrt(cov(index_in + 3, index_in + 3)) << " ";
      of_state_std << std::sqrt(cov(index_in + 4, index_in + 4)) << " " << std::sqrt(cov(index_in + 5, index_in + 5)) << " "
                   << std::sqrt(cov(index_in + 6, index_in + 6)) << " " << std::sqrt(cov(index_in + 7, index_in + 7)) << " ";
    } else {
      of_state_std << 0.0 << " " << 0.0 << " " << 0.0 << " " << 0.0 << " ";
      of_state_std << 0.0 << " " << 0.0 << " " << 0.0 << " " << 0.0 << " ";
    }
    if (state->_options.do_calib_camera_pose) {
      int index_ex = state->_calib_IMUtoCAM.at(i)->id();
      of_state_std << std::sqrt(cov(index_ex + 0, index_ex + 0)) << " " << std::sqrt(cov(index_ex + 1, index_ex + 1)) << " "
                   << std::sqrt(cov(index_ex + 2, index_ex + 2)) << " ";
      of_state_std << std::sqrt(cov(index_ex + 3, index_ex + 3)) << " " << std::sqrt(cov(index_ex + 4, index_ex + 4)) << " "
                   << std::sqrt(cov(index_ex + 5, index_ex + 5)) << " ";
    } else {
      of_state_std << 0.0 << " " << 0.0 << " " << 0.0 << " ";
      of_state_std << 0.0 << " " << 0.0 << " " << 0.0 << " ";
    }
  }

  // Done with the estimates!
  of_state_est << endl;
  of_state_std << endl;
}
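
/*
 * Example usage (illustrative sketch, assumptions noted): the three output
 * streams are plain std::ofstream objects opened by the caller; the file
 * paths and the "app"/"sim" handles below are placeholders. Calling this
 * once per filter update appends one line to each file (estimate, standard
 * deviation, and groundtruth when a simulator is present).
 *
 *   std::ofstream of_state_est("state_estimate.txt");
 *   std::ofstream of_state_std("state_deviation.txt");
 *   std::ofstream of_state_gt("state_groundtruth.txt");
 *   ROSVisualizerHelper::sim_save_total_state_to_file(app->get_state(), sim, of_state_est, of_state_std, of_state_gt);
 */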