Commit 8770bea2 authored by Ryan Pavlik's avatar Ryan Pavlik

wip

parent 76d61331
......@@ -36,8 +36,11 @@ set(TRACKING_SOURCE_FILES
tracking/t_imu_fusion.h
tracking/t_imu.h
tracking/t_kalman.cpp
tracking/t_tracker_blobs.cpp
tracking/t_tracker_blobs.h
tracking/t_tracker_psmv_fusion.h
tracking/t_tracker_psmv.cpp
tracking/t_tracker_psvr_fusion.h
tracking/t_tracker_psvr.cpp
tracking/t_tracking.h
)
......
......@@ -11,3 +11,4 @@
#include "t_imu.cpp"
#include "t_tracker_psmv_fusion.cpp"
#include "t_tracker_psvr_fusion.cpp"
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Shared tracker code between PS Move and PSVR.
* @author Pete Black <pblack@collabora.com>
* @author Jakob Bornecrantz <jakob@collabora.com>
* @author Ryan Pavlik <ryan.pavlik@collabora.com>
* @ingroup aux_tracking
*/
#include "t_tracker_blobs.h"
/*!
 * Undistort, rectify, threshold and blob-detect a single camera view.
 *
 * Writes the undistorted and rectified frames plus the detected keypoints
 * into @p view_state. If @p rgb is non-empty, the keypoints are drawn into
 * it for debugging.
 *
 * @param view_props Per-view calibration (remap tables), read-only.
 * @param view_state Per-view working state, updated in place.
 * @param grey       Input greyscale frame.
 * @param rgb        Optional debug image; used when it has columns.
 */
void
tracker_blobs_do_view(CamViewProps const &view_props,
                      CamViewState &view_state,
                      cv::Mat &grey,
                      cv::Mat &rgb)
{
	// Undistort the whole image.
	cv::remap(grey,                       // src
	          view_state.frame_undist,    // dst
	          view_props.undistort_map_x, // map1
	          view_props.undistort_map_y, // map2
	          cv::INTER_LINEAR,           // interpolation
	          cv::BORDER_CONSTANT,        // borderMode
	          cv::Scalar(0, 0, 0));       // borderValue

	// Rectify the whole image.
	cv::remap(view_state.frame_undist,  // src
	          view_state.frame_rectified, // dst
	          view_props.rectify_map_x, // map1
	          view_props.rectify_map_y, // map2
	          cv::INTER_LINEAR,         // interpolation
	          cv::BORDER_CONSTANT,      // borderMode
	          cv::Scalar(0, 0, 0));     // borderValue

	// Binarize in place: pixels above 32 become 255, the rest 0.
	// Use the named constant instead of the magic number 0.
	cv::threshold(view_state.frame_rectified, // src
	              view_state.frame_rectified, // dst
	              32.0,                       // thresh
	              255.0,                      // maxval
	              cv::THRESH_BINARY);         // type

	// tracker_measurement_t m = {};

	// Do blob detection with our masks.
	//! @todo Re-enable masks.
	view_state.sbd->detect(view_state.frame_rectified, // image
	                       view_state.keypoints,       // keypoints
	                       cv::noArray());             // mask

	// If debug output is wanted, draw the keypoints.
	if (rgb.cols > 0) {
		cv::drawKeypoints(
		    view_state.frame_rectified,                // image
		    view_state.keypoints,                      // keypoints
		    rgb,                                       // outImage
		    cv::Scalar(255, 0, 0),                     // color
		    cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS); // flags
	}
}
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Shared tracker code between PS Move and PSVR.
* @author Pete Black <pblack@collabora.com>
* @author Jakob Bornecrantz <jakob@collabora.com>
* @author Ryan Pavlik <ryan.pavlik@collabora.com>
* @ingroup aux_tracking
*/
#pragma once
#include <opencv2/opencv.hpp>

#include <utility>
#include <vector>
/*!
* @brief Helper struct that keeps the value that produces the lowest "score" as
* computed by your functor.
*
* Having this as a struct with a method, instead of a single "algorithm"-style
* function, allows you to keep your complicated filtering logic in your own
* loop, just calling in when you have a new candidate for "best".
*
* @note Create by calling make_lowest_score_finder() with your
* function/lambda that takes an element and returns the score, to deduce the
* un-spellable typename of the lambda.
*
* @tparam ValueType The type of a single element value - whatever you want to
* assign a score to.
* @tparam FunctionType The type of your functor/lambda that turns a ValueType
* into a float "score". Usually deduced.
*/
template <typename ValueType, typename FunctionType> struct FindLowestScore
{
	//! Functor mapping a candidate value to its float score (lower wins).
	const FunctionType score_functor;

	//! Has handle_candidate() accepted at least one value?
	bool got_one{false};

	//! Lowest-scoring value seen so far; meaningful only when got_one.
	ValueType best{};

	//! Score of @ref best; meaningful only when got_one.
	float best_score{0};

	/*!
	 * Offer a new candidate: it is kept if it is the first one seen, or
	 * if it scores strictly lower than the current best. Ties keep the
	 * earlier value.
	 */
	void
	handle_candidate(ValueType val)
	{
		float score = score_functor(val);
		if (!got_one || score < best_score) {
			best = val;
			best_score = score;
			got_one = true;
		}
	}
};

//! Factory function for FindLowestScore to deduce the functor type.
template <typename ValueType, typename FunctionType>
static inline FindLowestScore<ValueType, FunctionType>
make_lowest_score_finder(FunctionType scoreFunctor)
{
	// Move the functor in rather than copying it (and its capture state).
	return FindLowestScore<ValueType, FunctionType>{std::move(scoreFunctor)};
}
/*!
 * Single camera.
 *
 * Read-only per-view calibration data: the remap tables used to undistort
 * and then rectify incoming frames (fed to cv::remap as map1/map2).
 */
struct CamViewProps
{
	//! Undistortion remap table, x component.
	cv::Mat undistort_map_x;
	//! Undistortion remap table, y component.
	cv::Mat undistort_map_y;
	//! Rectification remap table, x component.
	cv::Mat rectify_map_x;
	//! Rectification remap table, y component.
	cv::Mat rectify_map_y;
};
/*!
 * Mutable per-camera working state: intermediate frames, detected blob
 * keypoints, and the blob detector instance.
 */
struct CamViewState
{
	//! Blob keypoints detected in the most recent rectified frame.
	std::vector<cv::KeyPoint> keypoints;
	//! Scratch frame: input after undistortion.
	cv::Mat frame_undist;
	//! Scratch frame: undistorted input after rectification (and
	//! thresholding, done in place).
	cv::Mat frame_rectified;
	//! Blob detector run over frame_rectified.
	cv::Ptr<cv::SimpleBlobDetector> sbd;
};
/*!
 * Undistort, rectify, threshold and blob-detect one camera view.
 *
 * The undistorted/rectified frames and the detected keypoints are written
 * into @p view_state; if @p rgb is non-empty the keypoints are also drawn
 * into it for debugging.
 */
void
tracker_blobs_do_view(CamViewProps const &view_props,
                      CamViewState &view_state,
                      cv::Mat &grey,
                      cv::Mat &rgb);
......@@ -5,6 +5,7 @@
* @brief PS Move tracker code.
* @author Pete Black <pblack@collabora.com>
* @author Jakob Bornecrantz <jakob@collabora.com>
* @author Ryan Pavlik <ryan.pavlik@collabora.com>
* @ingroup aux_tracking
*/
......@@ -170,50 +171,7 @@ do_view(TrackerPSMV &t, View &view, cv::Mat &grey, cv::Mat &rgb)
}
}
/*!
* @brief Helper struct that keeps the value that produces the lowest "score" as
* computed by your functor.
*
* Having this as a struct with a method, instead of a single "algorithm"-style
* function, allows you to keep your complicated filtering logic in your own
* loop, just calling in when you have a new candidate for "best".
*
* @note Create by calling make_lowest_score_finder() with your
* function/lambda that takes an element and returns the score, to deduce the
* un-spellable typename of the lambda.
*
* @tparam ValueType The type of a single element value - whatever you want to
* assign a score to.
* @tparam FunctionType The type of your functor/lambda that turns a ValueType
* into a float "score". Usually deduced.
*/
template <typename ValueType, typename FunctionType> struct FindLowestScore
{
	// NOTE(review): this duplicates FindLowestScore and
	// make_lowest_score_finder() in t_tracker_blobs.h — consider
	// including that header here instead of repeating the definitions.

	//! Functor mapping a candidate value to its float score (lower wins).
	const FunctionType score_functor;

	//! Has handle_candidate() accepted at least one value?
	bool got_one{false};

	//! Lowest-scoring value seen so far; meaningful only when got_one.
	ValueType best{};

	//! Score of best; meaningful only when got_one.
	float best_score{0};

	//! Offer a candidate: kept if first, or strictly lower-scoring than
	//! the current best (ties keep the earlier value).
	void
	handle_candidate(ValueType val)
	{
		float score = score_functor(val);
		if (!got_one || score < best_score) {
			best = val;
			best_score = score;
			got_one = true;
		}
	}
};

//! Factory function for FindLowestScore to deduce the functor type.
template <typename ValueType, typename FunctionType>
static FindLowestScore<ValueType, FunctionType>
make_lowest_score_finder(FunctionType scoreFunctor)
{
	return FindLowestScore<ValueType, FunctionType>{scoreFunctor};
}
//! Convert our 2d point + disparities into 3d points.
static cv::Point3f
......@@ -476,6 +434,7 @@ frame(TrackerPSMV &t, struct xrt_frame *xf)
}
xrt_frame_reference(&t.frame, xf);
// Wake up the thread.
os_thread_helper_signal_locked(&t.oth);
......
......@@ -29,13 +29,13 @@
#include "flexkalman/PoseState.h"
using State = flexkalman::pose_externalized_rotation::State;
using ProcessModel =
using PSMVState = flexkalman::pose_externalized_rotation::State;
using PSMVProcessModel =
flexkalman::PoseSeparatelyDampedConstantVelocityProcessModel;
namespace xrt_fusion {
struct TrackingInfo
struct PSMVTrackingInfo
{
bool valid{false};
bool tracked{false};
......@@ -73,14 +73,14 @@ namespace {
void
reset_filter_and_imu();
State filter_state;
ProcessModel process_model;
PSMVState filter_state;
PSMVProcessModel process_model;
xrt_fusion::SimpleIMUFusion imu;
bool tracked{false};
TrackingInfo orientation_state;
TrackingInfo position_state;
PSMVTrackingInfo orientation_state;
PSMVTrackingInfo position_state;
};
......@@ -94,15 +94,15 @@ namespace {
void
PSMVFusion::reset_filter()
{
filter_state = State{};
filter_state = PSMVState{};
tracked = false;
position_state = TrackingInfo{};
position_state = PSMVTrackingInfo{};
}
void
PSMVFusion::reset_filter_and_imu()
{
reset_filter();
orientation_state = TrackingInfo{};
orientation_state = PSMVTrackingInfo{};
imu = SimpleIMUFusion{};
}
......@@ -173,7 +173,7 @@ namespace {
fprintf(
stderr,
"Warning - measurement residual is %f, resetting "
"filter state\n",
"filter PSMVstate\n",
resid);
reset_filter();
return;
......@@ -244,4 +244,4 @@ PSMVFusionInterface::create()
auto ret = std::make_unique<PSMVFusion>();
return ret;
}
} // namespace xrt_fusion
\ No newline at end of file
} // namespace xrt_fusion
......@@ -3,14 +3,19 @@
/*!
* @file
* @brief PSVR tracker code.
* @author Pete Black <pblack@collabora.com>
* @author Jakob Bornecrantz <jakob@collabora.com>
* @author Ryan Pavlik <ryan.pavlik@collabora.com>
* @ingroup aux_tracking
*/
#include "xrt/xrt_tracking.h"
#include "tracking/t_tracking.h"
#include "tracking/t_calibration_opencv.h"
#include "tracking/t_tracker_psvr_fusion.h"
#include "util/u_var.h"
#include "util/u_misc.h"
#include "util/u_debug.h"
#include "util/u_frame.h"
......@@ -24,6 +29,21 @@
#include <assert.h>
#include <pthread.h>
/*!
 * Single camera.
 *
 * Per-view calibration tables plus mutable working state (keypoints and
 * scratch frames).
 */
struct View
{
	//! Undistortion remap tables (x and y components).
	cv::Mat undistort_map_x;
	cv::Mat undistort_map_y;
	//! Rectification remap tables (x and y components).
	cv::Mat rectify_map_x;
	cv::Mat rectify_map_y;

	//! Blob keypoints detected in the most recent frame.
	std::vector<cv::KeyPoint> keypoints;

	//! Scratch frames: input after undistortion / after rectification.
	cv::Mat frame_undist;
	cv::Mat frame_rectified;
};
class TrackerPSVR
{
......@@ -38,6 +58,16 @@ public:
//! Thread and lock helper.
struct os_thread_helper oth;
bool tracked = false;
struct
{
struct xrt_frame_sink *sink;
struct xrt_frame *frame;
cv::Mat rgb[2];
} debug;
//! Have we received a new IMU sample.
bool has_imu = false;
......@@ -46,19 +76,155 @@ public:
struct xrt_vec3 pos = {};
struct xrt_quat rot = {};
} fusion;
View view[2];
bool calibrated;
cv::Mat disparity_to_depth;
cv::Ptr<cv::SimpleBlobDetector> sbd;
std::unique_ptr<xrt_fusion::PSVRFusionInterface> filter;
xrt_vec3 tracked_object_position;
};
static void
procces(TrackerPSVR &t, struct xrt_frame *xf)
process(TrackerPSVR &t, struct xrt_frame *xf)
{
	/*
	 * Consume one stereo camera frame: detect blobs in both views, match
	 * them into likely disparity pairs, triangulate to 3D, pick the point
	 * nearest the previously tracked position, and feed it to the fusion
	 * filter. Takes ownership of the caller's @p xf reference — it is
	 * always unreferenced before returning (possibly early).
	 */

	// Only IMU data
	if (xf == NULL) {
		return;
	}

	// Wrong type of frame: unreference and return?
	if (xf->format != XRT_FORMAT_L8) {
		xrt_frame_reference(&xf, NULL);
		return;
	}

	// Lazily load the stereo calibration on the first usable frame.
	if (!t.calibrated) {
		bool ok = calibration_get_stereo(
		    "PS4_EYE",                  // name
		    xf->width,                  // width
		    xf->height,                 // height
		    false,                      // use_fisheye
		    &t.view[0].undistort_map_x, // l_undistort_map_x
		    &t.view[0].undistort_map_y, // l_undistort_map_y
		    &t.view[0].rectify_map_x,   // l_rectify_map_x
		    &t.view[0].rectify_map_y,   // l_rectify_map_y
		    &t.view[1].undistort_map_x, // r_undistort_map_x
		    &t.view[1].undistort_map_y, // r_undistort_map_y
		    &t.view[1].rectify_map_x,   // r_rectify_map_x
		    &t.view[1].rectify_map_y,   // r_rectify_map_y
		    &t.disparity_to_depth);     // disparity_to_depth

		if (ok) {
			printf("loaded calibration for camera!\n");
			t.calibrated = true;
		} else {
			// No calibration available yet: drop this frame.
			xrt_frame_reference(&xf, NULL);
			return;
		}
	}

	// Create the debug frame if needed.
	// refresh_gui_frame(t, xf);

	t.view[0].keypoints.clear();
	t.view[1].keypoints.clear();

	// The frame is a side-by-side stereo pair: left half then right half.
	int cols = xf->width / 2;
	int rows = xf->height;
	int stride = xf->stride;

	// Wrap each half without copying: these Mats alias xf->data.
	cv::Mat l_grey(rows, cols, CV_8UC1, xf->data, stride);
	cv::Mat r_grey(rows, cols, CV_8UC1, xf->data + cols, stride);

	do_view(t, t.view[0], l_grey, t.debug.rgb[0]);
	do_view(t, t.view[1], r_grey, t.debug.rgb[1]);

	// Prefer the triangulated point closest to where we last saw the
	// tracked object.
	cv::Point3f last_point(t.tracked_object_position.x,
	                       t.tracked_object_position.y,
	                       t.tracked_object_position.z);
	auto nearest_world =
	    make_lowest_score_finder<cv::Point3f>([&](cv::Point3f world_point) {
		    //! @todo don't really need the square root to be done here.
		    return cv::norm(world_point - last_point);
	    });

	// do some basic matching to come up with likely disparity-pairs.
	const cv::Matx44d disparity_to_depth =
	    static_cast<cv::Matx44d>(t.disparity_to_depth);
	for (const cv::KeyPoint &l_keypoint : t.view[0].keypoints) {
		cv::Point2f l_blob = l_keypoint.pt;

		// NOTE(review): the score is the *signed* x difference, so a
		// right-view blob lying to the right of the left-view blob
		// (negative disparity) scores lowest — confirm this is
		// intended rather than the smallest absolute difference.
		auto nearest_blob = make_lowest_score_finder<cv::Point2f>(
		    [&](cv::Point2f r_blob) { return l_blob.x - r_blob.x; });

		for (const cv::KeyPoint &r_keypoint : t.view[1].keypoints) {
			cv::Point2f r_blob = r_keypoint.pt;
			// find closest point on same-ish scanline
			// (within +/- 3 pixels vertically)
			if ((l_blob.y < r_blob.y + 3) &&
			    (l_blob.y > r_blob.y - 3)) {
				nearest_blob.handle_candidate(r_blob);
			}
		}

		//! @todo do we need to avoid claiming the same counterpart
		//! several times?
		if (nearest_blob.got_one) {
			cv::Point3f pt = world_point_from_blobs(
			    l_blob, nearest_blob.best, disparity_to_depth);
			nearest_world.handle_candidate(pt);
		}
	}

	if (nearest_world.got_one) {
		cv::Point3f world_point = nearest_world.best;

		// update internal state
		memcpy(&t.tracked_object_position, &world_point.x,
		       sizeof(t.tracked_object_position));
	} else {
		// NOTE(review): this flag is cleared again in the final else
		// branch below — one of the two calls is redundant.
		t.filter->clear_position_tracked_flag();
	}

	// Push the debug frame (if one was created) and reset the debug
	// images, then drop our frame references.
	if (t.debug.frame != NULL) {
		t.debug.sink->push_frame(t.debug.sink, t.debug.frame);
		t.debug.rgb[0] = cv::Mat();
		t.debug.rgb[1] = cv::Mat();
	}

	xrt_frame_reference(&xf, NULL);
	xrt_frame_reference(&t.debug.frame, NULL);

	if (nearest_world.got_one) {
#if 0
		//! @todo something less arbitrary for the lever arm?
		//! This puts the origin approximately under the PS
		//! button.
		xrt_vec3 lever_arm{0.f, 0.09f, 0.f};

		//! @todo this should depend on distance
		// Weirdly, this is where *not* applying the
		// disparity-to-distance/rectification/etc would
		// simplify things, since the measurement variance is
		// related to the image sensor. 1.e-4 means 1cm std dev.
		// Not sure how to estimate the depth variance without
		// some research.
		xrt_vec3 variance{1.e-4f, 1.e-4f, 4.e-4f};
#endif
		// NOTE(review): first argument is presumably a timestamp and
		// is hard-coded to 0 — confirm before relying on prediction.
		t.filter->process_3d_vision_data(
		    0, &t.tracked_object_position, NULL, NULL,
		    //! @todo tune cutoff for residual arbitrarily "too large"
		    15);
	} else {
		t.filter->clear_position_tracked_flag();
	}
}
/*!
* @brief Tracker processing thread function
*/
static void
run(TrackerPSVR &t)
{
......@@ -86,7 +252,7 @@ run(TrackerPSVR &t)
// Unlock the mutex when we do the work.
os_thread_helper_unlock(&t.oth);
procces(t, frame);
process(t, frame);
// Have to lock it again.
os_thread_helper_lock(&t.oth);
......@@ -95,6 +261,9 @@ run(TrackerPSVR &t)
os_thread_helper_unlock(&t.oth);
}
/*!
* @brief Retrieves a pose from the filter.
*/
static void
get_pose(TrackerPSVR &t,
struct time_state *timestate,
......@@ -223,6 +392,9 @@ t_psvr_node_destroy(struct xrt_frame_node *node)
os_thread_helper_destroy(&t_ptr->oth);
// Tidy variable setup.
u_var_remove_root(t_ptr);
delete t_ptr;
}
......@@ -245,14 +417,7 @@ extern "C" int
t_psvr_start(struct xrt_tracked_psvr *xtmv)
{
auto &t = *container_of(xtmv, TrackerPSVR, base);
int ret;
ret = os_thread_helper_start(&t.oth, t_psvr_run, &t);
if (ret != 0) {
return ret;
}
return ret;
return os_thread_helper_start(&t.oth, t_psvr_run, &t);
}
extern "C" int
......@@ -272,6 +437,7 @@ t_psvr_create(struct xrt_frame_context *xfctx,
t.node.break_apart = t_psvr_node_break_apart;
t.node.destroy = t_psvr_node_destroy;
t.fusion.rot.w = 1.0f;
t.filter = xrt_fusion::PSVRFusionInterface::create();
ret = os_thread_helper_init(&t.oth);
if (ret != 0) {
......@@ -289,8 +455,13 @@ t_psvr_create(struct xrt_frame_context *xfctx,
t.fusion.rot.z = 0.0f;
t.fusion.rot.w = 0.0f;
t.sbd = cv::SimpleBlobDetector::create(blob_params);
xrt_frame_context_add(xfctx, &t.node);
// Everything is safe, now setup the variable tracking.
u_var_add_root(&t, "PSVR Tracker", true);
u_var_add_sink(&t, &t.debug.sink, "Debug");
*out_sink = &t.sink;
*out_xtmv = &t.base;
......
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief PSVR tracker code that is expensive to compile.
*
* Typically built as a part of t_kalman.cpp to reduce incremental build times.
*
* @author Ryan Pavlik <ryan.pavlik@collabora.com>
* @author Pete Black <pblack@collabora.com>
* @author Jakob Bornecrantz <jakob@collabora.com>
* @ingroup aux_tracking
*/
#include "t_tracker_psvr_fusion.h"
#include "tracking/t_fusion.h"
#include "tracking/t_imu_fusion.h"
#include "math/m_api.h"
#include "math/m_eigen_interop.h"
#include "util/u_misc.h"
#include "flexkalman/AbsoluteOrientationMeasurement.h"
#include "flexkalman/FlexibleKalmanFilter.h"
#include "flexkalman/FlexibleUnscentedCorrect.h"
#include "flexkalman/PoseSeparatelyDampedConstantVelocity.h"
#include "flexkalman/PoseState.h"
using PSVRState = flexkalman::pose_externalized_rotation::State;
using PSVRProcessModel =