Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

T265 system timestamps #3453

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions doc/t265.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ The T265's sensors (including the IMU) are calibrated in the production line, so

## Integration with the SDK
The following `librealsense` tools and demos are IMU and tracking-ready:
- `rs-pose` - A basic pose retrieval example
- `rs-pose-predict` - Demonstrates pose prediction using current system time and the callback API
- `rs-capture` - 2D Visualization.
- `rs-enumerate-devices` - List the IMU and tracking profiles (FPS rates and formats).
- `rs-data-collect` - Store and serialize IMU and Tracking (pose) data in Excel-friendly CSV format. The tool uses the low-level sensor API to minimize software-imposed latencies. Useful for performance profiling.
Expand Down
17 changes: 9 additions & 8 deletions examples/pose-predict/rs-pose-predict.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,8 @@ inline rs2_quaternion quaternion_multiply(rs2_quaternion a, rs2_quaternion b)
return Q;
}

rs2_pose predict_pose(rs2_pose & pose, uint64_t dt_us)
rs2_pose predict_pose(rs2_pose & pose, float dt_s)
{
float dt_s = dt_us * 1e-6f;

rs2_pose P = pose;
P.translation.x = dt_s * (dt_s/2 * pose.acceleration.x + pose.velocity.x) + pose.translation.x;
P.translation.y = dt_s * (dt_s/2 * pose.acceleration.y + pose.velocity.y) + pose.translation.y;
Expand All @@ -55,8 +53,6 @@ int main(int argc, char * argv[]) try
// Add pose stream
cfg.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);

uint64_t dt_us = 20000; // predict 20ms (20000 microseconds) into the future

// Define frame callback
// The callback is executed on a sensor thread and can be called simultaneously from multiple sensors
// Therefore any modification to common memory should be done under lock
Expand All @@ -66,11 +62,16 @@ int main(int argc, char * argv[]) try
std::lock_guard<std::mutex> lock(mutex);
if (rs2::pose_frame fp = frame.as<rs2::pose_frame>()) {
rs2_pose pose_data = fp.get_pose_data();
rs2_pose predicted_pose = predict_pose(pose_data, dt_us);
std::cout << pose_data.tracker_confidence << " : " << std::setprecision(3) << std::fixed <<
auto now = std::chrono::system_clock::now().time_since_epoch();
double now_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now).count();
double pose_time_ms = fp.get_timestamp();
float dt_s = static_cast<float>(std::max(0., (now_ms - pose_time_ms)/1000.));
rs2_pose predicted_pose = predict_pose(pose_data, dt_s);
std::cout << "Predicted " << std::fixed << std::setprecision(3) << dt_s*1000 << "ms " <<
"Confidence: " << pose_data.tracker_confidence << " T: " <<
predicted_pose.translation.x << " " <<
predicted_pose.translation.y << " " <<
predicted_pose.translation.z << " (meters)\r";
predicted_pose.translation.z << " (meters) \r";
}
};

Expand Down
48 changes: 27 additions & 21 deletions src/tm2/tm-device.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -95,21 +95,21 @@ namespace librealsense
}
if(_type == RS2_FRAME_METADATA_TIME_OF_ARRIVAL)
{
// Note: additional_data.system_time is the arrival time
// (backend_time is what we have traditionally called
// system_time)
if (auto* vf = dynamic_cast<const video_frame*>(&frm))
{
const video_frame_metadata* md = reinterpret_cast<const video_frame_metadata*>(frm.additional_data.metadata_blob.data());
return (rs2_metadata_type)(md->arrival_ts);
return (rs2_metadata_type)(vf->additional_data.system_time);
}

if (auto* mf = dynamic_cast<const motion_frame*>(&frm))
{
const motion_frame_metadata* md = reinterpret_cast<const motion_frame_metadata*>(frm.additional_data.metadata_blob.data());
return (rs2_metadata_type)(md->arrival_ts);
return (rs2_metadata_type)(mf->additional_data.system_time);
}
if (auto* pf = dynamic_cast<const pose_frame*>(&frm))
{
const pose_frame_metadata* md = reinterpret_cast<const pose_frame_metadata*>(frm.additional_data.metadata_blob.data());
return (rs2_metadata_type)(md->arrival_ts);
return (rs2_metadata_type)(pf->additional_data.system_time);
}
}
if (_type == RS2_FRAME_METADATA_TEMPERATURE)
Expand Down Expand Up @@ -491,8 +491,8 @@ namespace librealsense
f.frameLength = vframe->get_height()*vframe->get_stride()* (vframe->get_bpp() / 8);
f.data = vframe->data.data();
f.timestamp = to_nanos(vframe->additional_data.timestamp);
f.systemTimestamp = to_nanos(vframe->additional_data.system_time);
f.arrivalTimeStamp = time_of_arrival;
f.systemTimestamp = to_nanos(vframe->additional_data.backend_timestamp);
f.arrivalTimeStamp = to_nanos(vframe->additional_data.system_time);
auto sts = _tm_dev->SendFrame(f);
if (sts != Status::SUCCESS)
{
Expand All @@ -511,8 +511,8 @@ namespace librealsense
f.sensorIndex = stream_index;
f.temperature = get_md_or_default(RS2_FRAME_METADATA_TEMPERATURE);
f.timestamp = to_nanos(mframe->additional_data.timestamp);
f.systemTimestamp = to_nanos(mframe->additional_data.system_time);
f.arrivalTimeStamp = time_of_arrival;
f.systemTimestamp = to_nanos(mframe->additional_data.backend_timestamp);
f.arrivalTimeStamp = to_nanos(mframe->additional_data.system_time);
auto sts = _tm_dev->SendFrame(f);
if (sts != Status::SUCCESS)
{
Expand All @@ -528,8 +528,8 @@ namespace librealsense
f.sensorIndex = stream_index;
f.temperature = get_md_or_default(RS2_FRAME_METADATA_TEMPERATURE);
f.timestamp = to_nanos(mframe->additional_data.timestamp);
f.systemTimestamp = to_nanos(mframe->additional_data.system_time);
f.arrivalTimeStamp = time_of_arrival;
f.systemTimestamp = to_nanos(mframe->additional_data.backend_timestamp);
f.arrivalTimeStamp = to_nanos(mframe->additional_data.system_time);
auto sts = _tm_dev->SendFrame(f);
if (sts != Status::SUCCESS)
{
Expand Down Expand Up @@ -677,11 +677,13 @@ namespace librealsense
duration<double, std::milli> ts_ms(ts_double_nanos);
auto sys_ts_double_nanos = duration<double, std::nano>(tm_frame.systemTimestamp);
duration<double, std::milli> system_ts_ms(sys_ts_double_nanos);
auto arr_ts_double_nanos = duration<double, std::nano>(tm_frame.arrivalTimeStamp);
duration<double, std::milli> arrival_ts_ms(arr_ts_double_nanos);
video_frame_metadata video_md = { 0 };
video_md.arrival_ts = tm_frame.arrivalTimeStamp;
video_md.exposure_time = tm_frame.exposuretime;

frame_additional_data additional_data(ts_ms.count(), tm_frame.frameId, system_ts_ms.count(), sizeof(video_md), (uint8_t*)&video_md, tm_frame.arrivalTimeStamp, 0 ,0, false);
frame_additional_data additional_data(ts_ms.count(), tm_frame.frameId, arrival_ts_ms.count(), sizeof(video_md), (uint8_t*)&video_md, system_ts_ms.count(), 0 ,0, false);

// Find the frame stream profile
std::shared_ptr<stream_profile_interface> profile = nullptr;
Expand Down Expand Up @@ -712,8 +714,8 @@ namespace librealsense
{
auto video = (video_frame*)(frame.frame);
video->assign(tm_frame.profile.width, tm_frame.profile.height, tm_frame.profile.stride, bpp);
frame->set_timestamp(ts_ms.count());
frame->set_timestamp_domain(RS2_TIMESTAMP_DOMAIN_HARDWARE_CLOCK);
frame->set_timestamp(system_ts_ms.count());
frame->set_timestamp_domain(RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME);
frame->set_stream(profile);
frame->set_sensor(this->shared_from_this()); //TODO? uvc doesn't set it?
video->data.assign(tm_frame.data, tm_frame.data + (tm_frame.profile.height * tm_frame.profile.stride));
Expand Down Expand Up @@ -762,10 +764,12 @@ namespace librealsense
duration<double, std::milli> ts_ms(ts_double_nanos);
auto sys_ts_double_nanos = duration<double, std::nano>(tm_frame.systemTimestamp);
duration<double, std::milli> system_ts_ms(sys_ts_double_nanos);
auto arr_ts_double_nanos = duration<double, std::nano>(tm_frame.arrivalTimeStamp);
duration<double, std::milli> arrival_ts_ms(arr_ts_double_nanos);
pose_frame_metadata frame_md = { 0 };
frame_md.arrival_ts = tm_frame.arrivalTimeStamp;

frame_additional_data additional_data(ts_ms.count(), frame_num++, system_ts_ms.count(), sizeof(frame_md), (uint8_t*)&frame_md, tm_frame.arrivalTimeStamp, 0, 0, false);
frame_additional_data additional_data(ts_ms.count(), frame_num++, arrival_ts_ms.count(), sizeof(frame_md), (uint8_t*)&frame_md, system_ts_ms.count(), 0, 0, false);

// Find the frame stream profile
std::shared_ptr<stream_profile_interface> profile = nullptr;
Expand All @@ -791,8 +795,8 @@ namespace librealsense
if (frame.frame)
{
auto pose_frame = static_cast<librealsense::pose_frame*>(frame.frame);
frame->set_timestamp(ts_ms.count());
frame->set_timestamp_domain(RS2_TIMESTAMP_DOMAIN_HARDWARE_CLOCK);
frame->set_timestamp(system_ts_ms.count());
frame->set_timestamp_domain(RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME);
frame->set_stream(profile);

auto info = reinterpret_cast<librealsense::pose_frame::pose_info*>(pose_frame->data.data());
Expand Down Expand Up @@ -897,11 +901,13 @@ namespace librealsense
duration<double, std::milli> ts_ms(ts_double_nanos);
auto sys_ts_double_nanos = duration<double, std::nano>(tm_frame_ts.systemTimestamp);
duration<double, std::milli> system_ts_ms(sys_ts_double_nanos);
auto arr_ts_double_nanos = duration<double, std::nano>(tm_frame_ts.arrivalTimeStamp);
duration<double, std::milli> arrival_ts_ms(arr_ts_double_nanos);
motion_frame_metadata motion_md = { 0 };
motion_md.arrival_ts = tm_frame_ts.arrivalTimeStamp;
motion_md.temperature = temperature;

frame_additional_data additional_data(ts_ms.count(), frame_number, system_ts_ms.count(), sizeof(motion_md), (uint8_t*)&motion_md, tm_frame_ts.arrivalTimeStamp, 0, 0, false);
frame_additional_data additional_data(ts_ms.count(), frame_number, arrival_ts_ms.count(), sizeof(motion_md), (uint8_t*)&motion_md, system_ts_ms.count(), 0, 0, false);

// Find the frame stream profile
std::shared_ptr<stream_profile_interface> profile = nullptr;
Expand All @@ -926,8 +932,8 @@ namespace librealsense
if (frame.frame)
{
auto motion_frame = static_cast<librealsense::motion_frame*>(frame.frame);
frame->set_timestamp(ts_ms.count());
frame->set_timestamp_domain(RS2_TIMESTAMP_DOMAIN_HARDWARE_CLOCK);
frame->set_timestamp(system_ts_ms.count());
frame->set_timestamp_domain(RS2_TIMESTAMP_DOMAIN_SYSTEM_TIME);
frame->set_stream(profile);
auto data = reinterpret_cast<float*>(motion_frame->data.data());
data[0] = imu_data[0];
Expand Down
26 changes: 3 additions & 23 deletions third-party/libtm/libtm/src/infra/Utils.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -49,29 +49,9 @@

nsecs_t systemTime()
{
#ifdef _WIN32
/*
auto start = std::chrono::high_resolution_clock::now();
std::chrono::time_point<std::chrono::high_resolution_clock, std::chrono::nanoseconds> time_point_ns(start);
return time_point_ns.time_since_epoch().count();*/
LARGE_INTEGER StartingTime = { 0 };
LARGE_INTEGER Frequency = { 0 };

QueryPerformanceFrequency(&Frequency);
QueryPerformanceCounter(&StartingTime);

StartingTime.QuadPart *= 1000000LL; // convert to microseconds
StartingTime.QuadPart /= Frequency.QuadPart;
StartingTime.QuadPart *= 1000; // Convert to nano seconds

return StartingTime.QuadPart;

#else
struct timespec ts;
ts.tv_sec = ts.tv_nsec = 0;
clock_gettime(CLOCK_MONOTONIC, &ts);
return ((nsecs_t)(ts.tv_sec)) * 1000000000LL + ts.tv_nsec;
#endif
auto start = std::chrono::system_clock::now();
std::chrono::time_point<std::chrono::system_clock, std::chrono::nanoseconds> time_point_ns(start);
return time_point_ns.time_since_epoch().count();
}


Expand Down