...
 
Commits (3)
......@@ -35,6 +35,7 @@ pkg_check_modules(LIBUVC libuvc)
# @TODO Turn into a find_package FFMPEG file.
pkg_check_modules(FFMPEG libavcodec)
find_package(uvbi)
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
# Compositor backend
......@@ -52,6 +53,7 @@ cmake_dependent_option(BUILD_WITH_OPENGL "Enable OpenGL Graphics API support" ON
set(BUILD_WITH_LIBUSB TRUE)
cmake_dependent_option(BUILD_WITH_JPEG "Enable jpeg code (used for some video drivers)" ON "JPEG_FOUND" OFF)
cmake_dependent_option(BUILD_WITH_UVBI "Enable UVBI-based optical tracking driver" ON "LIBUVC_FOUND AND uvbi_FOUND AND OPENCV_FOUND" OFF)
cmake_dependent_option(BUILD_WITH_OPENCV "Enable OpenCV backend" ON "OpenCV_FOUND" OFF)
cmake_dependent_option(BUILD_WITH_LIBUVC "Enable libuvc video driver" ON "LIBUVC_FOUND" OFF)
cmake_dependent_option(BUILD_WITH_FFMPEG "Enable ffmpeg testing video driver" ON "FFMPEG_FOUND" OFF)
......@@ -116,6 +118,17 @@ if(TRUE)
set(BUILD_DRIVER_PSMV TRUE)
endif()
if(BUILD_WITH_UVBI)
add_definitions(-DXRT_HAVE_UVBI)
endif()
if(BUILD_WITH_OPENCV AND (BUILD_WITH_FFMPEG OR BUILD_WITH_JPEG))
# Condition for enabling the montrack optical tracking driver.
# JPEG required for both UVC and v4l2 backends.
add_definitions(-DXRT_BUILD_MONTRACK)
set(BUILD_DRIVER_MONTRACK TRUE)
endif()
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -pedantic -Wall -Wextra -Wno-unused-parameter")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -Wno-unused-parameter")
......
......@@ -5,8 +5,13 @@
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/../include
${CMAKE_CURRENT_SOURCE_DIR}/../auxiliary
${CMAKE_CURRENT_SOURCE_DIR}/montrack
${CMAKE_CURRENT_SOURCE_DIR}/montrack/frameservers/common
)
if(BUILD_DRIVER_MONTRACK)
add_subdirectory(montrack)
endif()
if(BUILD_DRIVER_HDK)
set(HDK_SOURCE_FILES
......
# Driver core sources for the montrack optical-tracking driver.
set (MONTRACK_SOURCE_FILES
	mt_device.c
	mt_device.h
	mt_interface.h
	mt_prober.c
	mt_events.h
	mt_framequeue.h
	mt_framequeue.c
	)

# Sub-components, each built as an OBJECT library.
add_subdirectory(frameservers)
add_subdirectory(filters)
add_subdirectory(optical_tracking)

# Use OBJECT to not create a archive, since it just gets in the way.
# NOTE(review): the sub-component objects are pulled in twice — once via
# $<TARGET_OBJECTS:...> in the source list and again via the
# target_link_libraries() call below. On CMake >= 3.12, linking an OBJECT
# library already propagates its objects to consumers, so one of the two
# mechanisms is likely redundant — confirm which is intended before removing.
add_library(drv_montrack OBJECT ${MONTRACK_SOURCE_FILES}
	$<TARGET_OBJECTS:frameserver>
	$<TARGET_OBJECTS:filter>
	$<TARGET_OBJECTS:optical_tracking>
	)
set_property(TARGET drv_montrack PROPERTY POSITION_INDEPENDENT_CODE ON)
# Headers of the sub-components, treated as system headers to silence
# their warnings.
target_include_directories(drv_montrack SYSTEM
	PRIVATE frameservers
	PRIVATE optical_tracking
	PRIVATE filters)
target_link_libraries (drv_montrack frameserver filter optical_tracking)
# Copyright 2019, Collabora, Ltd.
# SPDX-License-Identifier: BSL-1.0

# Pose-filter component (currently an OpenCV Kalman filter implementation).
set(FILTER_SOURCE_FILES
	common/filter.h
	common/filter.c
	filter_opencv_kalman.cpp
	filter_opencv_kalman.h
	)

# Use OBJECT to not create a archive, since it just gets in the way.
add_library(filter OBJECT ${FILTER_SOURCE_FILES})
set_property(TARGET filter PROPERTY POSITION_INDEPENDENT_CODE ON)

# Target-scoped include paths (was a directory-scoped include_directories(),
# which leaked into every target below this directory).
target_include_directories(filter
	PRIVATE
	${CMAKE_CURRENT_SOURCE_DIR}/../include
	${CMAKE_CURRENT_SOURCE_DIR}/../auxiliary
	${CMAKE_CURRENT_SOURCE_DIR}
	)
# Fix: the original path was "${CMAKE_CURRENT_SOURCE_DIR}../" — the missing
# '/' produced a bogus ".../filters.." directory instead of the parent.
target_include_directories(filter SYSTEM
	PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/..
	PRIVATE ${OpenCV_INCLUDE_DIRS}
	)
#include "filter.h"
#include "filter_opencv_kalman.h"
#include <string.h>
#include "util/u_misc.h"
/*!
 * Factory: create a filter of the requested type.
 *
 * Returns NULL for FILTER_TYPE_NONE and any unknown type.
 */
struct filter_instance*
filter_create(filter_type_t t)
{
	switch (t) {
	case FILTER_TYPE_OPENCV_KALMAN:
		// Fix: filter_opencv_kalman_create() returns a
		// struct filter_opencv_kalman*; returning it without a cast
		// from a function returning struct filter_instance* is an
		// incompatible-pointer-type conversion in C. The kalman
		// struct embeds filter_instance as its first member, so the
		// cast is well-defined.
		return (struct filter_instance*)filter_opencv_kalman_create();
	case FILTER_TYPE_NONE:
	default: return NULL;
	}
}
/*!
 * Smoke test: construct (and release) an OpenCV Kalman filter instance.
 *
 * @return false when creation fails, true otherwise.
 */
bool
filters_test()
{
	// create a filter
	struct filter_instance* filter =
	    filter_create(FILTER_TYPE_OPENCV_KALMAN);
	if (!filter) {
		return false;
	}
	// Fix: the created instance was leaked; release it through its
	// destroy hook (see filter_destroy() in filter.h).
	filter_destroy(filter);
	return true;
}
#ifndef FILTER_H
#define FILTER_H

#include <xrt/xrt_defines.h>
#include "util/u_time.h"

// Opaque, filter-type-specific configuration blob (see each filter's header
// for the concrete type it expects, e.g. opencv_filter_configuration_t).
typedef void* filter_configuration_ptr;

struct filter_instance;

// A filtered pose estimate plus derivatives; the has_* flags say which
// parts of `pose` were actually produced by the filter.
struct filter_state
{
	struct xrt_pose pose;
	bool has_position;
	bool has_rotation;
	struct xrt_vec3 velocity;
	struct xrt_vec3 acceleration;
	struct xrt_quat angular_velocity;
	struct xrt_quat angular_accel;
	timepoint_ns timestamp;
};

typedef enum filter_type
{
	FILTER_TYPE_NONE,
	FILTER_TYPE_OPENCV_KALMAN
} filter_type_t;

typedef struct tracker_measurement tracker_measurement_t;

/*!
 * Base "class" for filters: a type tag plus a vtable of operations.
 * Concrete filters embed this as their first member and fill in the
 * function pointers at creation time.
 */
struct filter_instance
{
	filter_type_t tracker_type;
	bool (*queue)(struct filter_instance* inst,
	              tracker_measurement_t* measurement);
	bool (*set_state)(struct filter_instance* inst,
	                  struct filter_state* state);
	bool (*get_state)(struct filter_instance* inst,
	                  struct filter_state* state);
	bool (*predict_state)(struct filter_instance* inst,
	                      struct filter_state* state,
	                      timepoint_ns time);
	bool (*configure)(struct filter_instance* inst,
	                  filter_configuration_ptr config);
	void (*destroy)(struct filter_instance* inst);
};

// Factory; returns NULL for unknown/none types.
struct filter_instance*
filter_create(filter_type_t t);

// Self test.
bool
filters_test();

// Convenience wrappers that dispatch through the vtable above.
static inline void
filter_destroy(struct filter_instance* inst)
{
	inst->destroy(inst);
}

static inline bool
filter_queue(struct filter_instance* inst, tracker_measurement_t* measurement)
{
	return inst->queue(inst, measurement);
}

static inline bool
filter_set_state(struct filter_instance* inst, struct filter_state* state)
{
	return inst->set_state(inst, state);
}

static inline bool
filter_get_state(struct filter_instance* inst, struct filter_state* state)
{
	return inst->get_state(inst, state);
}

static inline bool
filter_predict_state(struct filter_instance* inst,
                     struct filter_state* state,
                     timepoint_ns time)
{
	return inst->predict_state(inst, state, time);
}

static inline bool
filter_configure(struct filter_instance* inst, filter_configuration_ptr config)
{
	return inst->configure(inst, config);
}

#endif // FILTER_H
#include <opencv2/opencv.hpp>
#include "../optical_tracking/common/tracker.h"
#include "filter_opencv_kalman.h"
#include "util/u_misc.h"
// Concrete filter implementation wrapping cv::KalmanFilter. Embeds the
// generic filter_instance as the first member so pointers can be cast
// between the two representations.
struct filter_opencv_kalman
{
	struct filter_instance base;
	bool configured;      // set by _configure(); noise covariances applied
	opencv_filter_configuration_t configuration;
	cv::KalmanFilter kalman_filter;
	cv::Mat observation; // 3x1 measurement vector (x, y, z position)
	cv::Mat prediction;  // 6x1 predicted state (position + velocity)
	cv::Mat state;
	bool running;        // true once at least one measurement was queued
};

/*!
 * Casts the internal instance pointer from the generic opaque type to our
 * opencv_kalman internal type.
 */
static inline struct filter_opencv_kalman*
filter_opencv_kalman(struct filter_instance* ptr)
{
	return (struct filter_opencv_kalman*)ptr;
}
// Release an instance allocated by filter_opencv_kalman_create().
// NOTE(review): plain free() does not run the C++ destructors of the
// cv::Mat / cv::KalmanFilter members, so their internal buffers are likely
// leaked — confirm whether this should destruct the members first.
static void
filter_opencv_kalman_destroy(struct filter_instance* inst)
{
	free(inst);
}
// Feed a new positional measurement into the Kalman filter and mark the
// filter as running.
static bool
filter_opencv_kalman_queue(struct filter_instance* inst,
                           tracker_measurement_t* measurement)
{
	struct filter_opencv_kalman* internal = filter_opencv_kalman(inst);
	printf("queueing measurement in filter\n");
	// Pack the measured position into the 3x1 observation vector.
	internal->observation.at<float>(0, 0) = measurement->pose.position.x;
	internal->observation.at<float>(1, 0) = measurement->pose.position.y;
	internal->observation.at<float>(2, 0) = measurement->pose.position.z;
	internal->kalman_filter.correct(internal->observation);
	internal->running = true;
	// NOTE(review): returns false even though the measurement was
	// consumed successfully — looks like it should return true; confirm
	// against callers of filter_queue().
	return false;
}
// Stub: state readback is not implemented yet; always reports failure.
bool
filter_opencv_kalman_get_state(struct filter_instance* inst,
                               struct filter_state* state)
{
	return false;
}

// Stub: state injection is not implemented yet; always reports failure.
bool
filter_opencv_kalman_set_state(struct filter_instance* inst,
                               struct filter_state* state)
{
	return false;
}
// Predict the filtered pose. Only the position part of `state` is filled
// in; fails until at least one measurement has been queued.
// NOTE(review): the `time` parameter is unused — prediction always advances
// by the fixed dt baked into the transition matrix; confirm intent.
bool
filter_opencv_kalman_predict_state(struct filter_instance* inst,
                                   struct filter_state* state,
                                   timepoint_ns time)
{
	struct filter_opencv_kalman* internal = filter_opencv_kalman(inst);
	// printf("getting filtered pose\n");
	if (!internal->running) {
		return false;
	}
	internal->prediction = internal->kalman_filter.predict();
	state->has_position = true;
	// First three state entries are the position components.
	state->pose.position.x = internal->prediction.at<float>(0, 0);
	state->pose.position.y = internal->prediction.at<float>(1, 0);
	state->pose.position.z = internal->prediction.at<float>(2, 0);
	return true;
}
// Apply an opencv_filter_configuration_t: set the process and measurement
// noise covariances and mark the instance as configured.
bool
filter_opencv_kalman_configure(struct filter_instance* inst,
                               filter_configuration_ptr config_generic)
{
	struct filter_opencv_kalman* internal = filter_opencv_kalman(inst);
	opencv_filter_configuration_t* config =
	    (opencv_filter_configuration_t*)config_generic;
	internal->configuration = *config;
	cv::setIdentity(
	    internal->kalman_filter.processNoiseCov,
	    cv::Scalar::all(internal->configuration.process_noise_cov));
	cv::setIdentity(
	    internal->kalman_filter.measurementNoiseCov,
	    cv::Scalar::all(internal->configuration.measurement_noise_cov));
	internal->configured = true;
	return true;
}
/*!
 * Allocate and initialise a 6-state (position + velocity), 3-measurement
 * constant-velocity Kalman filter. Returns NULL on allocation failure.
 * The instance starts unconfigured and not running.
 *
 * NOTE(review): U_TYPED_CALLOC gives zero-filled storage, but the cv::Mat /
 * cv::KalmanFilter members are then assigned without ever having been
 * constructed — this relies on zero-fill looking like an empty object.
 * Confirm this is intentional (placement-new would be the safe form).
 */
struct filter_opencv_kalman*
filter_opencv_kalman_create()
{
	struct filter_opencv_kalman* i =
	    U_TYPED_CALLOC(struct filter_opencv_kalman);
	if (!i) {
		return NULL;
	}
	// Wire up the generic vtable.
	i->base.queue = filter_opencv_kalman_queue;
	i->base.set_state = filter_opencv_kalman_set_state;
	i->base.get_state = filter_opencv_kalman_get_state;
	i->base.predict_state = filter_opencv_kalman_predict_state;
	i->base.configure = filter_opencv_kalman_configure;
	i->base.destroy = filter_opencv_kalman_destroy;
	// Fixed timestep baked into the transition matrix below.
	float dt = 1.0;
	i->kalman_filter.init(6, 3);
	i->observation = cv::Mat(3, 1, CV_32F);
	i->prediction = cv::Mat(6, 1, CV_32F);
	// Constant-velocity model: p' = p + v*dt, v' = v.
	i->kalman_filter.transitionMatrix =
	    (cv::Mat_<float>(6, 6) << 1.0, 0.0, 0.0, dt, 0.0, 0.0, 0.0, 1.0,
	     0.0, 0.0, dt, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, dt, 0.0, 0.0, 0.0, 1.0,
	     0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
	     1.0);
	cv::setIdentity(i->kalman_filter.measurementMatrix,
	                cv::Scalar::all(1.0f));
	cv::setIdentity(i->kalman_filter.errorCovPost, cv::Scalar::all(0.0f));
	// our filter parameters set the process and measurement noise
	// covariances.
	// NOTE(review): i->configuration is still zeroed at this point (set
	// later by _configure(), which re-applies these) — so these two calls
	// install zero covariances; confirm that is the intended default.
	cv::setIdentity(i->kalman_filter.processNoiseCov,
	                cv::Scalar::all(i->configuration.process_noise_cov));
	cv::setIdentity(
	    i->kalman_filter.measurementNoiseCov,
	    cv::Scalar::all(i->configuration.measurement_noise_cov));
	i->configured = false;
	i->running = false;
	return i;
}
#ifndef FILTER_OPENCV_KALMAN_H
#define FILTER_OPENCV_KALMAN_H
#include <xrt/xrt_defines.h>
#include "common/filter.h"
typedef struct opencv_filter_configuration
{
float measurement_noise_cov;
float process_noise_cov;
} opencv_filter_configuration_t;
typedef struct opencv_kalman_filter_state
{
struct xrt_pose pose;
} opencv_kalman_filter_state_t;
#ifdef __cplusplus
extern "C" {
#endif
// forward declare this, as it contains C++ stuff
struct filter_opencv_kalman;
struct filter_opencv_kalman*
filter_opencv_kalman_create();
#ifdef __cplusplus
} // extern "C"
#endif
#endif // FILTER_OPENCV_KALMAN_H
# Copyright 2019, Collabora, Ltd.
# SPDX-License-Identifier: BSL-1.0

# Core frameserver abstraction.
set(FRAMESERVER_SOURCE_FILES
	common/frameserver.c
	common/frameserver.h
	)

# Optional backends, gated on the feature options computed at the top level.
if(BUILD_WITH_FFMPEG)
	list(APPEND FRAMESERVER_SOURCE_FILES
		ffmpeg/ffmpeg_frameserver.c
		ffmpeg/ffmpeg_frameserver.h
		)
endif()
if(BUILD_WITH_LIBUVC AND BUILD_WITH_JPEG)
	list(APPEND FRAMESERVER_SOURCE_FILES
		uvc/uvc_frameserver.c
		uvc/uvc_frameserver.h
		)
endif()
if(BUILD_WITH_JPEG)
	list(APPEND FRAMESERVER_SOURCE_FILES
		v4l2/v4l2_frameserver.c
		v4l2/v4l2_frameserver.h
		)
endif()

# Use OBJECT to not create a archive, since it just gets in the way.
add_library(frameserver OBJECT ${FRAMESERVER_SOURCE_FILES})
set_property(TARGET frameserver PROPERTY POSITION_INDEPENDENT_CODE ON)

# Target-scoped include paths (was a directory-scoped include_directories(),
# which leaked into every target below this directory).
target_include_directories(frameserver
	PRIVATE
	${CMAKE_CURRENT_SOURCE_DIR}/../include
	${CMAKE_CURRENT_SOURCE_DIR}/../auxiliary
	${CMAKE_CURRENT_SOURCE_DIR}
	)
target_include_directories(frameserver SYSTEM
	PRIVATE
	${CMAKE_CURRENT_SOURCE_DIR}/..
	)
if(BUILD_WITH_LIBUVC AND BUILD_WITH_JPEG)
	target_include_directories(frameserver SYSTEM
		PRIVATE
		${libuvc_INCLUDE_DIRS}
		${LIBUSB1_INCLUDE_DIRS}
		)
endif()
if(BUILD_WITH_JPEG)
	target_include_directories(frameserver SYSTEM
		PRIVATE
		${JPEG_INCLUDE_DIRS}
		)
endif()
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Implementation of frameserver interface and shared functions.
* @author Pete Black <pblack@collabora.com>
* @author Ryan Pavlik <ryan.pavlik@collabora.com>
*/
#include "frameserver.h"
#ifdef XRT_HAVE_FFMPEG
#include "ffmpeg/ffmpeg_frameserver.h"
#endif // XRT_HAVE_FFMPEG
#ifdef XRT_HAVE_LIBUVC
#include "uvc/uvc_frameserver.h"
#endif // XRT_HAVE_LIBUVC
#include "v4l2/v4l2_frameserver.h"
#include <stdio.h>
#include <stdlib.h>
/*!
 * Factory: construct a frameserver backend of the given type. Backends
 * compiled out (no XRT_HAVE_FFMPEG / XRT_HAVE_LIBUVC) fall through to the
 * default case and yield NULL.
 */
struct frameserver*
frameserver_create(enum frameserver_type t)
{
	/*
	 * Each implementation constructor should set up the members of the
	 * frameserver instance, as well as return a pointer to itself. If it
	 * fails, it should return NULL without de-allocating the frameserver
	 * instance: that is the responsibility of this function.
	 */
	switch (t) {
#ifdef XRT_HAVE_FFMPEG
	case FRAMESERVER_TYPE_FFMPEG: return ffmpeg_frameserver_create();
#endif // XRT_HAVE_FFMPEG
#ifdef XRT_HAVE_LIBUVC
	case FRAMESERVER_TYPE_UVC: return uvc_frameserver_create();
#endif // XRT_HAVE_LIBUVC
	case FRAMESERVER_TYPE_V4L2: return v4l2_frameserver_create();
	case FRAMESERVER_TYPE_NONE:
	default: return NULL;
	}
}
/*
 * Per-pixel byte count for a frame format. Returned as a float because
 * planar subsampled formats (YUV420) average a fractional byte count.
 * Returns -1.0f for formats with no fixed per-pixel size (RAW, JPG, NONE).
 */
float
fs_format_bytes_per_pixel(enum fs_frame_format f)
{
	float bpp = -1.0f;
	switch (f) {
	case FS_FORMAT_Y_UINT8: bpp = 1.0f; break;
	case FS_FORMAT_YUV420_UINT8: bpp = 1.5f; break;
	case FS_FORMAT_Y_UINT16:
	case FS_FORMAT_YUV422_UINT8:
	case FS_FORMAT_YUYV_UINT8: bpp = 2.0f; break;
	case FS_FORMAT_BGR_UINT8:
	case FS_FORMAT_RGB_UINT8:
	case FS_FORMAT_YUV444_UINT8: bpp = 3.0f; break;
	default:
		// Covers FS_FORMAT_RAW, FS_FORMAT_JPG and anything unknown.
		printf("cannot compute format bytes per pixel\n");
		break;
	}
	return bpp;
}
/*
 * Total buffer size in bytes needed to hold a frame, or -1 when the frame
 * is NULL or the format has no computable size. For JPG this is an upper
 * bound (worst case assuming YUV444 content).
 */
int32_t
fs_frame_size_in_bytes(struct fs_frame* f)
{
	if (f == NULL) {
		return -1;
	}

	int32_t frame_bytes = -1;
	// TODO: alpha formats, padding etc.
	switch (f->format) {
	case FS_FORMAT_Y_UINT8:
	case FS_FORMAT_YUV420_UINT8:
	case FS_FORMAT_Y_UINT16:
	case FS_FORMAT_YUV422_UINT8:
	case FS_FORMAT_BGR_UINT8:
	case FS_FORMAT_RGB_UINT8:
	case FS_FORMAT_YUV444_UINT8:
	case FS_FORMAT_YUYV_UINT8:
		// Packed formats: stride already accounts for bytes per pixel.
		frame_bytes = f->stride * f->height;
		break;
	case FS_FORMAT_JPG:
		// this is a maximum (assuming YUV444)
		frame_bytes = f->width * f->height * 3;
		break;
	case FS_FORMAT_RAW:
	case FS_FORMAT_NONE:
	default: printf("cannot compute frame size for this format\n");
	}
	return frame_bytes;
}
// Stub: integer bytes-per-pixel query — not implemented; always -1.
// (Use fs_format_bytes_per_pixel() for the float variant.)
int32_t
fs_frame_bytes_per_pixel(struct fs_frame* f)
{
	printf("ERROR: Not implemented\n");
	return -1;
}

// Stub: split a side-by-side/over-and-under stereo frame into two frames —
// not implemented; always fails.
bool
fs_frame_split_stereo(struct fs_frame* source,
                      struct fs_frame* left,
                      struct fs_frame* right)
{
	printf("ERROR: Not implemented!\n");
	return false;
}
/*!
 * Extract a single plane from a frame into `out`. Only Y-plane extraction
 * is implemented, and only for the source formats handled below. Allocates
 * out->data when the caller did not (caller must free it).
 */
bool
fs_frame_extract_plane(struct fs_frame* source,
                       enum fs_plane plane,
                       struct fs_frame* out)
{
	// Fix: the original guard used '&&' (reject only when BOTH format
	// was not YUYV AND plane was not Y), which let unsupported
	// combinations through — e.g. a YUYV source with plane U/V silently
	// produced Y data. Reject everything but the implemented cases.
	if (plane != FS_PLANE_Y ||
	    (source->format != FS_FORMAT_YUYV_UINT8 &&
	     source->format != FS_FORMAT_YUV444_UINT8)) {
		printf("ERROR: unhandled plane extraction\n");
		return false;
	}
	if (!source->data) {
		printf("ERROR: no frame data!\n");
		return false;
	}
	uint8_t* source_ptr;
	uint8_t* dest_ptr;
	uint8_t source_pixel_bytes = fs_format_bytes_per_pixel(source->format);
	uint32_t source_line_bytes = source->stride;
	uint8_t dest_pixel_bytes = fs_format_bytes_per_pixel(out->format);
	// NOTE(review): destination pitch is width, not stride — assumes a
	// tightly packed destination; confirm.
	uint32_t dest_line_bytes = out->width;
	if (!out->data) {
		printf(
		    "allocating data for NULL plane - someone needs to free "
		    "this!\n");
		out->data = malloc(fs_frame_size_in_bytes(out));
		// Fix: allocation failure previously led to a NULL write below.
		if (!out->data) {
			printf("ERROR: could not allocate plane data!\n");
			return false;
		}
	}
	switch (source->format) {
	case FS_FORMAT_YUYV_UINT8:
	case FS_FORMAT_YUV444_UINT8:
		// Both formats store Y as the first byte of each pixel, so
		// copy byte 0 of every source pixel.
		for (uint32_t i = 0; i < source->height; i++) {
			for (uint32_t j = 0; j < source->width; j++) {
				source_ptr = source->data +
				             (j * source_pixel_bytes) +
				             (i * source_line_bytes);
				dest_ptr = out->data + (j * dest_pixel_bytes) +
				           (i * dest_line_bytes);
				*dest_ptr = *source_ptr;
			}
		}
		break;
	default: return false;
	}
	return true;
}
/*!
 * Resample a frame into another format. Only YUYV -> YUV444 upsampling is
 * implemented. Allocates out->data when the caller did not (caller must
 * free it).
 */
bool
fs_frame_resample(struct fs_frame* source, struct fs_frame* out)
{
	// TODO: more complete resampling.
	// Fix: the original guard used '&&', so it only rejected when BOTH
	// the source was not YUYV AND the destination was not YUV444 — e.g.
	// a YUYV -> YUV420 request slipped through and was written with
	// YUV444 layout. The implemented conversion requires both to match.
	if (source->format != FS_FORMAT_YUYV_UINT8 ||
	    out->format != FS_FORMAT_YUV444_UINT8) {
		printf("ERROR: unhandled resample operation\n");
		return false;
	}
	if (!source->data) {
		printf("ERROR: no frame data!\n");
		return false;
	}
	uint8_t* source_ptr;
	uint8_t* dest_ptr;
	uint8_t source_pixel_bytes = fs_format_bytes_per_pixel(source->format);
	uint32_t source_line_bytes = source->stride;
	uint8_t dest_pixel_bytes = fs_format_bytes_per_pixel(out->format);
	uint32_t dest_line_bytes = out->stride;
	if (!out->data) {
		printf(
		    "allocating data for NULL plane - someone needs to free "
		    "this!\n");
		out->data = (uint8_t*)malloc(fs_frame_size_in_bytes(out));
		// Fix: allocation failure previously led to a NULL write below.
		if (!out->data) {
			printf("ERROR: could not allocate frame data!\n");
			return false;
		}
	}
	uint8_t lastU = 0;
	switch (source->format) {
	case FS_FORMAT_YUYV_UINT8:
		// YUYV packs U/V at half horizontal resolution; duplicate
		// the chroma of each even pixel into the following odd one.
		for (uint32_t i = 0; i < source->height; i++) {
			for (uint32_t j = 0; j < source->width; j++) {
				source_ptr = source->data +
				             (j * source_pixel_bytes) +
				             (i * source_line_bytes);
				dest_ptr = out->data + (j * dest_pixel_bytes) +
				           (i * dest_line_bytes);
				*dest_ptr = *source_ptr; // Y
				if (j % 2 == 0) {
					*(dest_ptr + 1) =
					    *(source_ptr + 1); // U
					*(dest_ptr + 2) =
					    *(source_ptr +
					      3); // V from next source pixel
					lastU = *(dest_ptr + 1);
				} else {
					*(dest_ptr + 1) = lastU;
					*(dest_ptr + 2) = *(source_ptr + 1);
				}
			}
		}
		return true;
	default: return false;
	}
	return false;
}
/*!
 * Self test for the frameserver backends.
 */
bool
frameservers_test()
{
#ifdef XRT_HAVE_FFMPEG
	// Fix: this call was unguarded, but ffmpeg_frameserver.h is only
	// included under XRT_HAVE_FFMPEG above — building without ffmpeg
	// support failed to compile/link this file.
	ffmpeg_frameserver_test();
#endif // XRT_HAVE_FFMPEG
	// uvc_frameserver_test();
	// v4l2_frameserver_test();
	return true;
}
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Header for frameserver interface
* @author Pete Black <pblack@collabora.com>
* @author Ryan Pavlik <ryan.pavlik@collabora.com>
*/
#pragma once
#include "math/m_api.h"
#include "mt_events.h"
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
#define FS_MAX_PLANES 3 // this is what we see currently in e.g. RGB,YUV
// frame
enum fs_frame_format
{
FS_FORMAT_NONE,
FS_FORMAT_RAW,
FS_FORMAT_Y_UINT8,
FS_FORMAT_Y_UINT16,
FS_FORMAT_RGB_UINT8,
FS_FORMAT_BGR_UINT8,
FS_FORMAT_YUYV_UINT8,
FS_FORMAT_YUV444_UINT8,
FS_FORMAT_YUV422_UINT8,
FS_FORMAT_YUV420_UINT8,
FS_FORMAT_JPG
};
enum fs_stereo_format
{
FS_STEREO_NONE,
FS_STEREO_SBS,
FS_STEREO_OAU
};
enum fs_plane
{
FS_PLANE_NONE,
FS_PLANE_R,
FS_PLANE_G,
FS_PLANE_B,
FS_PLANE_Y,
FS_PLANE_U,
FS_PLANE_V
};
enum fs_chroma_sampling
{
FS_CHROMA_SAMP_NONE,
FS_CHROMA_SAMP_444,
FS_CHROMA_SAMP_422,
FS_CHROMA_SAMP_411
};
enum fs_plane_layout
{
FS_PLANE_LAYOUT_COMPOSITE,
FS_PLANE_LAYOUT_SEPARATE
};
enum fs_sampling
{
FS_SAMPLING_NONE,
FS_SAMPLING_UPSAMPLED,
FS_SAMPLING_DOWNSAMPLED
};
// unnormalised pixel coordinates for clipping ROIs
struct fs_frame_rect
{
struct xrt_vec2 tl;
struct xrt_vec2 br;
};
// basic frame data structure - holds a pointer to buffer.
struct fs_frame
{
uint16_t width;
uint16_t height;
uint16_t stride;
enum fs_frame_format format;
enum fs_stereo_format stereo_format;
uint32_t size_bytes;
uint8_t* data;
enum fs_chroma_sampling chroma_sampling; // unused
enum fs_plane_layout plane_layout; // unused
uint8_t* u_data; // unused
uint8_t* v_data; // unused
uint64_t timestamp;
uint64_t source_timestamp;
uint64_t source_sequence; // sequence id
uint64_t source_id; // used to tag frames with the source they
// originated from
};
struct fs_capture_parameters
{
// used to configure cameras. since there is no guarantee every
// frameserver will support any/all of these params, a 'best effort'
// should be made to apply them. all numeric values are normalised
// floats for broad applicability
float gain;
float exposure;
};
// frameserver
enum frameserver_type
{
FRAMESERVER_TYPE_NONE,
FRAMESERVER_TYPE_FFMPEG,
FRAMESERVER_TYPE_UVC,
FRAMESERVER_TYPE_V4L2
};
// Opaque, backend-specific source descriptor (ffmpeg_source_descriptor,
// uvc_source_descriptor, v4l2_source_descriptor, ...).
typedef void* fs_source_descriptor_ptr;

struct frameserver;

// Callback invoked for each delivered frame.
typedef void (*fs_frame_consumer_callback_func)(struct frameserver* instance,
                                                struct fs_frame* frame);

/*!
 * Base "class" for frame sources: a type tag plus a vtable filled in by the
 * concrete backend at creation time.
 */
struct frameserver
{
	enum frameserver_type type;
	/*!
	 * Enumerate all available sources.
	 */
	bool (*enumerate_sources)(struct frameserver* inst,
	                          fs_source_descriptor_ptr sources,
	                          uint32_t* count);
	/*!
	 * Apply capture parameters (gain/exposure) to the source.
	 */
	bool (*configure_capture)(struct frameserver* inst,
	                          struct fs_capture_parameters cp);
	/*!
	 * Fetch a single frame.
	 */
	bool (*frame_get)(struct frameserver* inst, struct fs_frame* frame);
	/*!
	 * Register a consumer for driver events.
	 */
	void (*register_event_callback)(
	    struct frameserver* inst,
	    void* target_instance,
	    event_consumer_callback_func target_func);
	/*!
	 * Seek to a timestamp (file-backed sources).
	 */
	bool (*seek)(struct frameserver* inst, uint64_t timestamp);
	/*!
	 * Start streaming from the given source.
	 */
	bool (*stream_start)(struct frameserver* inst,
	                     fs_source_descriptor_ptr source);
	/*!
	 * Stop streaming.
	 */
	bool (*stream_stop)(struct frameserver* inst);
	/*!
	 * Query whether the stream is currently running.
	 */
	bool (*is_running)(struct frameserver* inst);
	void (*destroy)(struct frameserver* inst);
};

// Factory; see frameserver_create() in frameserver.c.
struct frameserver* frameserver_create(enum frameserver_type);
struct frameserver* frameserver_create(enum frameserver_type);
// Convenience wrappers that dispatch through the frameserver vtable.
static inline bool
frameserver_enumerate_sources(struct frameserver* inst,
                              fs_source_descriptor_ptr sources,
                              uint32_t* count)
{
	return inst->enumerate_sources(inst, sources, count);
}

static inline bool
frameserver_configure_capture(struct frameserver* inst,
                              struct fs_capture_parameters cp)
{
	return inst->configure_capture(inst, cp);
}

static inline bool
frameserver_frame_get(struct frameserver* inst, struct fs_frame* _frame)
{
	return inst->frame_get(inst, _frame);
}

static inline void
frameserver_register_event_callback(struct frameserver* inst,
                                    void* target_instance,
                                    event_consumer_callback_func target_func)
{
	inst->register_event_callback(inst, target_instance, target_func);
}

static inline bool
frameserver_seek(struct frameserver* inst, uint64_t timestamp)
{
	return inst->seek(inst, timestamp);
}

static inline bool
frameserver_stream_start(struct frameserver* inst,
                         fs_source_descriptor_ptr source)
{
	return inst->stream_start(inst, source);
}

static inline bool
frameserver_stream_stop(struct frameserver* inst)
{
	return inst->stream_stop(inst);
}

static inline bool
frameserver_is_running(struct frameserver* inst)
{
	return inst->is_running(inst);
}

static inline void
frameserver_destroy(struct frameserver* inst)
{
	inst->destroy(inst);
}
int32_t
fs_frame_size_in_bytes(struct fs_frame* f);
int32_t
fs_frame_bytes_per_pixel(struct fs_frame* f);
float
fs_format_bytes_per_pixel(
enum fs_frame_format f); // this is a float to support e.g. YUV420
bool
fs_frame_split_stereo(struct fs_frame* source,
struct fs_frame* left,
struct fs_frame* right);
bool
fs_frame_extract_plane(struct fs_frame* source,
enum fs_plane plane,
struct fs_frame* out);
bool
fs_frame_resample(struct fs_frame* source, struct fs_frame* out);
bool
frameservers_test();
#ifdef __cplusplus
}
#endif
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Header
* @author Pete Black <pblack@collabora.com>
*/
#pragma once
#include "frameserver.h"
#ifdef __cplusplus
extern "C" {
#endif
struct ffmpeg_source_descriptor
{
char name[128];
char* filepath;
uint64_t source_id;
uint32_t current_frame;
uint32_t frame_count;
enum fs_frame_format format;
uint32_t width;
uint32_t height;
};
struct frameserver*
ffmpeg_frameserver_create();
bool
ffmpeg_frameserver_test();
This diff is collapsed.
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Header
* @author Pete Black <pblack@collabora.com>
*/
#pragma once
#include "frameserver.h"
#ifdef __cplusplus
extern "C" {
#endif
// TODO: unify device descriptors across apis
struct uvc_source_descriptor
{
char name[128];
uint16_t vendor_id;
uint16_t product_id;
char serial[128];
uint64_t source_id;
uint32_t uvc_device_index;
/*enum uvc_frame_format*/ int stream_format;
enum fs_frame_format format;
enum fs_sampling sampling;
uint32_t width;
uint32_t height;
uint32_t rate;
};
struct frameserver*
uvc_frameserver_create();
#ifdef __cplusplus
}
#endif
This diff is collapsed.
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Header
* @author Pete Black <pblack@collabora.com>
*/
#pragma once
#include "../common/frameserver.h"
#ifdef __cplusplus
extern "C" {
#endif
struct v4l2_source_descriptor
{
char device_path[256]; // TODO: might not be enough
char name[128];
char model[128];
uint64_t source_id;
enum fs_frame_format format;
uint32_t stream_format;
enum fs_sampling sampling;
uint32_t width;
uint32_t height;
uint32_t rate;
uint8_t extended_format;
uint32_t crop_scanline_bytes_start; // byte offset - special case for
// ps4 camera
uint32_t crop_width; // pixels - special case for ps4 camera
};
struct frameserver*
v4l2_frameserver_create();
This diff is collapsed.
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Interface to internal Monado driver code.
* @author Pete Black <pete.black@collabora.com>
*/
#pragma once
#include "math/m_api.h"
#include "xrt/xrt_device.h"
#include "optical_tracking/common/tracker.h"
#include "frameservers/common/frameserver.h"
#include "filters/common/filter.h"
#ifdef __cplusplus
extern "C" {
#endif
// Montrack device: an xrt_device driven by one or more frameservers feeding
// an optical tracker, optionally smoothed by a filter.
typedef struct mt_device
{
	struct xrt_device base;
	struct frameserver* frameservers[MAX_FRAMESERVERS];
	uint32_t frameserver_count;
	tracker_instance_t* tracker;
	// TODO: merge these configurations to be descriptive of
	// n-source trackers
	tracker_mono_configuration_t config_mono;
	tracker_stereo_configuration_t config_stereo;
	struct filter_instance* filter;
	bool log_verbose;
	bool log_debug;
} mt_device_t;

// Downcast helper from the generic xrt_device.
XRT_MAYBE_UNUSED static inline mt_device_t*
mt_device(struct xrt_device* xdev)
{
	return (mt_device_t*)xdev;
}
mt_device_t*
mt_device_create(char* device_name, bool log_verbose, bool log_debug);
bool
mt_create_mono_ps3eye(mt_device_t* md); // mono blob tracker, ps3 60fps camera
bool
mt_create_mono_c270(
mt_device_t* md); // mono blob tracker, logitech 30fps c270 camera
bool
mt_create_stereo_elp(
mt_device_t* md); // stereo tracker, ELP 60fps stereo camera
bool
mt_create_uvbi_elp(mt_device_t* md); // uvbi tracker, ELP 60fps stereo camera
bool
mt_create_uvbi_hdk(mt_device_t* md); // uvbi tracker, OSVR HDK 100fps IR camera
bool
mt_create_stereo_ps4(
mt_device_t* md); // stereo tracker, PS4 60fps stereo camera
void
mt_handle_event(mt_device_t* md, driver_event_t e);
void
dummy_init_mt_device(mt_device_t* md);
#ifdef __cplusplus
}
#endif
// Events flowing between frameservers, trackers and the mt_device.
typedef enum driver_event_type
{
	EVENT_NONE,
	EVENT_FRAMESERVER_GOTFRAME,
	EVENT_TRACKER_RECONFIGURED
} driver_event_type_t;

// Event payload; currently just the type tag.
typedef struct driver_event
{
	driver_event_type_t type;
	// extra data to go along with events
	// can be added here
} driver_event_t;

// Callback signature for event consumers.
typedef void (*event_consumer_callback_func)(void* instance,
                                             driver_event_t event);
#include "util/u_misc.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "mt_framequeue.h"
static void
frame_array_resize(frame_array_t* fa, uint32_t s);
/*!
 * Process-wide frame queue singleton; lazily allocated on first call.
 * TODO: locking — not thread-safe today.
 */
frame_queue_t*
frame_queue_instance()
{
	static frame_queue_t* fq = NULL;
	// TODO: locking
	if (fq == NULL) {
		fq = U_TYPED_CALLOC(frame_queue_t);
		if (!fq) {
			printf("ERROR: could not malloc!\n");
			// Fix: exit(0) reported success to the parent process;
			// allocation failure is a fatal error.
			exit(EXIT_FAILURE);
		}
		frame_array_init(&fq->frames);
		fq->source_id_counter = 0;
	}
	return fq;
}
// Hand out a unique id for a new frame producer.
// TODO: locking — the increment is not atomic.
uint64_t
frame_queue_uniq_source_id(frame_queue_t* fq)
{
	uint64_t id = fq->source_id_counter;
	fq->source_id_counter = id + 1;
	return id;
}
/*!
 * Find the newest frame from the given source, increment its refcount and
 * return it; NULL when the source has no queued frames.
 * TODO: locking
 */
struct fs_frame*
frame_queue_ref_latest(frame_queue_t* fq, uint32_t source_id)
{
	uint32_t selected_index = 0;
	struct fs_frame* ret = NULL;
	for (uint32_t i = 0; i < fq->frames.size; i++) {
		struct fs_frame* f = &fq->frames.items[i];
		if (f->source_id != source_id) {
			continue;
		}
		// Fix: the original compared against an initial highest_seq
		// of 0 with a strict '>', so a frame whose source_sequence
		// was 0 (the first frame from a source) could never be
		// selected. Track the best candidate directly instead.
		if (ret == NULL || f->source_sequence > ret->source_sequence) {
			ret = f;
			selected_index = i;
		}
	}
	if (ret) {
		framedata_t* fd = &fq->frames.refdata[selected_index];
		fd->refcount++;
		return ret;
	}
	return NULL;
}
/*!
 * Release one reference on the queued frame matching f's source id and
 * sequence number. Unmatched frames are ignored.
 * TODO: locking
 */
void
frame_queue_unref(frame_queue_t* fq, struct fs_frame* f)
{
	for (uint32_t i = 0; i < fq->frames.size; i++) {
		struct fs_frame* qf = &fq->frames.items[i];
		if (qf->source_id == f->source_id &&
		    qf->source_sequence == f->source_sequence) {
			framedata_t* fd = &fq->frames.refdata[i];
			// Fix: refcount is unsigned — an unbalanced unref
			// wrapped it to UINT32_MAX, pinning the frame in the
			// queue forever. Clamp at zero instead.
			if (fd->refcount > 0) {
				fd->refcount--;
			}
			// (source id, sequence) identifies the frame
			// uniquely; no need to scan the rest.
			return;
		}
	}
}
/*!
 * Add a frame to the queue, first evicting any unreferenced frames.
 * Also records a data-less copy of the frame as the per-source descriptor.
 * TODO: locking
 */
void
frame_queue_add(frame_queue_t* fq, struct fs_frame* f)
{
	// update our frame data for this source
	if (f->source_id < MAX_FRAME_SOURCES) {
		fq->source_frames[f->source_id] = *f;
		fq->source_frames[f->source_id].data = NULL;
	}
	printf("queue add: existing size: %d\n", fq->frames.size);
	// Collect the indices of all unreferenced frames.
	uint32_t* indices_to_remove =
	    malloc(sizeof(uint32_t) * fq->frames.size);
	uint32_t c_index = 0;
	if (indices_to_remove != NULL) {
		for (uint32_t i = 0; i < fq->frames.size; i++) {
			framedata_t* fd = &fq->frames.refdata[i];
			printf("checking frame - refcount %d\n", fd->refcount);
			if (fd->refcount == 0) {
				indices_to_remove[c_index] = i;
				c_index++;
			}
		}
		printf("queue marking %d indices for removal\n", c_index);
		// Fix: frame_array_delete() shifts all later elements down
		// one slot, so removing in ascending order invalidated the
		// remaining stored indices and deleted the wrong frames
		// (or skipped some). Remove from highest index to lowest.
		for (uint32_t i = c_index; i > 0; i--) {
			frame_array_delete(&fq->frames,
			                   indices_to_remove[i - 1]);
			printf("queue deleting frame new size %d\n",
			       fq->frames.size);
		}
		free(indices_to_remove);
	}
	frame_array_add(&fq->frames, f);
	printf("queue adding frame new size %d\n", fq->frames.size);
}
// Initialise a frame array with the default initial capacity.
// NOTE(review): both mallocs are unchecked — a failure here leaves items/
// refdata NULL while capacity claims FRAMEQUEUE_INITIAL_CAPACITY; confirm
// whether callers can tolerate that.
void
frame_array_init(frame_array_t* fa)
{
	fa->capacity = FRAMEQUEUE_INITIAL_CAPACITY;
	fa->size = 0;
	fa->items = malloc(sizeof(struct fs_frame) * fa->capacity);
	fa->refdata = malloc(sizeof(framedata_t) * fa->capacity);
}

// Number of frames currently stored.
// NOTE(review): the header (mt_framequeue.h) declares frame_array_total(),
// not frame_array_size() — one of the two names is stale; confirm.
uint32_t
frame_array_size(frame_array_t* fa)
{
	return fa->size;
}
/*!
 * Change the array's capacity. On allocation failure the array stays in a
 * consistent (possibly unchanged) state.
 */
void
frame_array_resize(frame_array_t* fa, uint32_t capacity)
{
#ifdef DEBUG_ON
	printf("resize: %d to %d\n", fa->capacity, capacity);
#endif
	/*
	 * Fix: a successful realloc invalidates the old pointer, but the
	 * original code only stored the new pointers when BOTH reallocs
	 * succeeded — if the second failed, fa->items was left dangling.
	 * Commit each successful realloc immediately instead.
	 */
	void* new_items = realloc(fa->items, sizeof(struct fs_frame) * capacity);
	if (!new_items) {
		return; // nothing changed; old buffers still valid
	}
	fa->items = (struct fs_frame*)new_items;

	void* new_refdata = realloc(fa->refdata, sizeof(framedata_t) * capacity);
	if (!new_refdata) {
		// items was already resized; keep the smaller of the two
		// sizes as the usable capacity so no buffer is overrun.
		if (capacity < fa->capacity) {
			fa->capacity = capacity;
		}
		return;
	}
	fa->refdata = (framedata_t*)new_refdata;
	fa->capacity = capacity;
}
/*!
 * Append a frame to the array, taking a private copy of its pixel data.
 * The stored frame's data pointer is redirected at the copy. On allocation
 * failure the frame is dropped (array unchanged).
 */
void
frame_array_add(frame_array_t* fa, struct fs_frame* f)
{
	if (fa->capacity == fa->size) {
		// alloc double the size on mem exhaustion
		frame_array_resize(fa, fa->capacity * 2);
		// Fix: resize can fail (realloc failure), leaving capacity
		// unchanged; appending anyway wrote past the buffers.
		if (fa->capacity == fa->size) {
			printf("ERROR: could not grow frame array\n");
			return;
		}
	}
	struct fs_frame* nf = fa->items + fa->size;
	memcpy(nf, f, sizeof(struct fs_frame));
	framedata_t* fd = fa->refdata + fa->size;
	fd->refcount = 0;
	fd->buffer = malloc(f->size_bytes);
	// Fix: unchecked malloc previously fed NULL straight into memcpy.
	if (!fd->buffer) {
		printf("ERROR: could not copy frame data\n");
		return; // slot not committed; fa->size unchanged
	}
	memcpy(fd->buffer, f->data, f->size_bytes);
	nf->data = fd->buffer; // point the queued frames data ptr at our
	                       // refdata
	fa->size++;
}
/*void frame_array_set(frame_array_t* fa, uint32_t index, struct fs_frame* f)
{
if (index >= 0 && index < fa->size) {
memcpy( fa->items+index,f,sizeof(struct fs_frame));
}
}*/
// Bounds-checked element access; NULL when index is out of range.
struct fs_frame*
frame_array_get(frame_array_t* fa, uint32_t index)
{
	return (index < fa->size) ? &fa->items[index] : NULL;
}
// Remove the element at `index`, freeing its pixel copy and sliding the
// tail of both parallel arrays down one slot. Out-of-range indices are
// ignored. Shrinks the backing storage after a major size reduction.
void
frame_array_delete(frame_array_t* fa, uint32_t index)
{
	if (index >= fa->size) {
		return;
	}
	// Release the pixel copy owned by this slot.
	framedata_t* fd = &fa->refdata[index];
	free(fd->buffer);
	fd->buffer = NULL;
	// Close the gap in both parallel arrays.
	uint32_t tail = fa->size - index - 1;
	memmove(&fa->items[index], &fa->items[index + 1],
	        tail * sizeof(struct fs_frame));
	memmove(&fa->refdata[index], &fa->refdata[index + 1],
	        tail * sizeof(framedata_t));
	fa->size--;
	// reclaim memory on major size reduction
	if (fa->size > 0 && fa->size == fa->capacity / 4) {
		frame_array_resize(fa, fa->capacity / 2);
	}
}
/*!
 * Release everything the array owns: every per-frame pixel copy plus both
 * backing arrays. The array is left empty and safe to re-init.
 */
void
frame_array_free(frame_array_t* fa)
{
	// Fix: this was an empty stub (with a typo'd, commented-out free of
	// "v->items"), leaking every queued frame copy and both arrays.
	for (uint32_t i = 0; i < fa->size; i++) {
		free(fa->refdata[i].buffer);
	}
	free(fa->items);
	free(fa->refdata);
	fa->items = NULL;
	fa->refdata = NULL;
	fa->size = 0;
	fa->capacity = 0;
}
// Fix: the include guard was VECTOR_H (copied from a generic vector header),
// which risks colliding with any other file using that common name. Use a
// guard derived from this header's own name.
#ifndef MT_FRAMEQUEUE_H
#define MT_FRAMEQUEUE_H

#include "frameservers/common/frameserver.h"

#define FRAMEQUEUE_INITIAL_CAPACITY 32
#define MAX_FRAME_SOURCES 32

// Refcount + owned copy of a queued frame's pixel data.
typedef struct frame_data
{
	uint32_t refcount;
	uint8_t* buffer;
} framedata_t;

// Growable array of frames with parallel refcount/buffer bookkeeping.
typedef struct frame_array
{
	struct fs_frame* items;
	framedata_t* refdata;
	uint32_t size;
	uint32_t capacity;
} frame_array_t;

typedef struct frame_queue
{
	frame_array_t frames;
	uint64_t source_id_counter;
	struct fs_frame
	    source_frames[MAX_FRAME_SOURCES]; //'empty' frames (NULL data) that
	                                      // act as a per-source descriptor
} frame_queue_t;

frame_queue_t*
frame_queue_instance(); //'singleton' initialisation

// NOTE(review): frame_queue_init() has no definition in mt_framequeue.c —
// stale declaration; confirm and remove.
void
frame_queue_init();

struct fs_frame*
frame_queue_ref_latest(frame_queue_t* fq,
                       uint32_t source_id); // used by consumers
void
frame_queue_unref(frame_queue_t* fq, struct fs_frame* f); // used by consumers
void
frame_queue_add(frame_queue_t* fq, struct fs_frame* f); // used by producers
uint64_t
frame_queue_uniq_source_id(frame_queue_t* fq); // used by producers

void
frame_array_init(frame_array_t* fa);
// NOTE(review): the implementation defines frame_array_size(), not
// frame_array_total() — one of the two names is stale; confirm.
uint32_t
frame_array_total(frame_array_t* fa);
void
frame_array_add(frame_array_t* fa, struct fs_frame* f);
// NOTE(review): frame_array_set() is commented out in mt_framequeue.c —
// stale declaration; confirm and remove.
void
frame_array_set(frame_array_t* fa, uint32_t index, struct fs_frame* f);
struct fs_frame*
frame_array_get(frame_array_t* fa, uint32_t index);
void
frame_array_delete(frame_array_t* fa, uint32_t index);
void
frame_array_free(frame_array_t* fa);

#endif // MT_FRAMEQUEUE_H
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Interface to Monado internal driver code.
* @author Pete Black <pete.black@collabora.com>
*/
#pragma once
#ifdef __cplusplus
extern "C" {
#endif
struct xrt_auto_prober*
mt_create_auto_prober();
#ifdef __cplusplus
}
#endif
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Monado internal prober code.
* @author Pete Black <pete.black@collabora.com>
*/
#include <stdio.h>
#include <stdlib.h>
#include "xrt/xrt_prober.h"
#include "util/u_misc.h"
#include "util/u_debug.h"
#include "mt_device.h"
DEBUG_GET_ONCE_BOOL_OPTION(mt_verbose, "MT_VERBOSE", false)
DEBUG_GET_ONCE_BOOL_OPTION(mt_debug, "MT_DEBUG", false)
// Auto-prober for montrack devices; wraps the generic xrt_auto_prober.
typedef struct mt_prober
{
	struct xrt_auto_prober base;
	bool log_verbose;
	bool log_debug;
} mt_prober_t;

// Downcast helper from the generic xrt_auto_prober.
static inline mt_prober_t*
mt_prober(struct xrt_auto_prober* xp)
{
	return (mt_prober_t*)xp;
}

// Destroy hook: releases the prober allocated in mt_create_auto_prober().
static void
mt_prober_destroy(struct xrt_auto_prober* xp)
{
	mt_prober_t* mp = mt_prober(xp);
	free(mp);
}
// Autoprobe hook. Currently a stub: all device-creation paths are
// commented out and it always reports "no device found" (NULL).
static struct xrt_device*
mt_prober_autoprobe(struct xrt_auto_prober* p)
{
	// struct mt_prober* mp = mt_prober(p);
	// here we would call functions to consult our config, check devices
	// are present etc. - for now we will attempt to create a mono blob
	// tracker, with any uvc camera we can use
	// mt_device_t* mtd = mt_device_create("MONO_LOGITECH_C270",true,true);
	// mt_device_t* mtd = mt_device_create("STEREO_ELP_60FPS",true,true);
	// mt_device_t* mtd = mt_device_create("MONO_PS3EYE",true,true);
	// mt_device_t* mtd =
	// mt_device_create("STEREO_LOGITECH_C270",true,true);
	// mt_device_t* mtd = mt_device_create("STEREO_PS4_60FPS", true, true);
	return NULL;
	// return &mtd->base;
}
/*!
 * Create the montrack auto-prober. Log levels come from the MT_VERBOSE and
 * MT_DEBUG environment options. Returns NULL on allocation failure.
 */
struct xrt_auto_prober*
mt_create_auto_prober()
{
	mt_prober_t* mp = U_TYPED_CALLOC(mt_prober_t);
	// Fix: calloc failure previously fell through to a NULL dereference.
	if (!mp) {
		return NULL;
	}
	mp->base.destroy = mt_prober_destroy;
	mp->base.lelo_dallas_autoprobe = mt_prober_autoprobe;
	mp->log_verbose = debug_get_bool_option_mt_verbose();
	mp->log_debug = debug_get_bool_option_mt_debug();
	return &mp->base;
}
# Copyright 2019, Collabora, Ltd.
# SPDX-License-Identifier: BSL-1.0

# Optical tracking component (blob trackers, PSVR tracking, optional UVBI).
set(OPTICAL_TRACKING_SOURCE_FILES
	common/calibration.c
	common/calibration.h
	common/tracked_object.h
	common/tracked_object.c
	common/tracker.h
	common/tracker.c
	common/opencv_utils.hpp
	tracker3D_sphere_stereo.cpp
	tracker3D_sphere_stereo.h
	tracker3D_sphere_mono.cpp
	tracker3D_sphere_mono.h
	track_psvr.h
	track_psvr.cpp
	)
if(BUILD_WITH_UVBI)
	list(APPEND OPTICAL_TRACKING_SOURCE_FILES
		tracker3D_uvbi.cpp
		tracker3D_uvbi.h
		)
endif()

# Use OBJECT to not create a archive, since it just gets in the way.
add_library(optical_tracking OBJECT ${OPTICAL_TRACKING_SOURCE_FILES})
set_property(TARGET optical_tracking PROPERTY POSITION_INDEPENDENT_CODE ON)

# Target-scoped include path (was a directory-scoped include_directories(),
# which leaked into every target below this directory).
target_include_directories(optical_tracking
	PRIVATE
	${CMAKE_CURRENT_SOURCE_DIR}
	)
# NOTE(review): both spellings of the OpenCV include variable are kept —
# ${OpenCV_INCLUDE_DIRS} comes from find_package(OpenCV) while
# ${OPENCV_INCLUDE_DIRS} would come from pkg-config; one is likely always
# empty. Confirm which discovery path is used and drop the other.
target_include_directories(optical_tracking SYSTEM
	PRIVATE
	${OpenCV_INCLUDE_DIRS}
	${OPENCV_INCLUDE_DIRS}
	${CMAKE_CURRENT_SOURCE_DIR}/..
	)
if(BUILD_WITH_UVBI)
	target_link_libraries(optical_tracking PRIVATE uvbi::uvbi_plugin_parts)
endif()
/* TODO: move the current in-tracker calibration here */
#ifndef CALIBRATION_H
#define CALIBRATION_H
/* TODO: move the current in-tracker calibration here */