Merge pull request #2079 from manoskyr/optic_flow_changes

Optic flow changes
This commit is contained in:
Gautier Hattenberger
2017-06-21 00:33:08 +02:00
committed by GitHub
26 changed files with 337 additions and 144 deletions
+1
View File
@@ -5,6 +5,7 @@
<description>Find a colored item and track its geo-location and update a waypoint to it</description>
<define name="BLOB_LOCATOR_CAMERA" value="front_camera|bottom_camera" description="Video device to use"/>
<define name="BLOB_LOCATOR_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
</doc>
<settings>
<dl_settings>
+1
View File
@@ -4,6 +4,7 @@
<doc>
<description>ColorFilter</description>
<define name="COLORFILTER_CAMERA" value="front_camera|bottom_camera" description="Video device to use"/>
<define name="COLORFILTER_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
</doc>
<settings>
+1
View File
@@ -6,6 +6,7 @@
</description>
<define name="DETECT_CONTOUR_CAMERA" value="front_camera|bottom_camera" description="Video device to use"/>
<define name="DETECT_CONTOUR_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
</doc>
<header>
<file name="detect_contour.h"/>
+2
View File
@@ -5,6 +5,8 @@
<description>
Detect window
</description>
<define name="DETECT_WINDOW_CAMERA" value="front_camera|bottom_camera" description="Video device to use"/>
<define name="DETECT_WINDOW_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
</doc>
<header>
+1
View File
@@ -9,6 +9,7 @@
</description>
<define name="OPENCVDEMO_CAMERA" value="front_camera|bottom_camera" description="Video device to use"/>
<define name="OPENCVDEMO_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
</doc>
<header>
<file name="cv_opencvdemo.h"/>
+7
View File
@@ -36,6 +36,8 @@
<define name="MEDIAN_FILTER" value="0" description="A median filter on the resulting velocities to be turned on or off (last 5 measurements)"/>
<define name="KALMAN_FILTER" value="1" description="A kalman filter on the resulting velocities to be turned on or off (fused with accelerometers)"/>
<define name="KALMAN_FILTER_PROCESS_NOISE" value="0.01" description="The expected variance of the error of the model's prediction in the kalman filter"/>
<define name="FEATURE_MANAGEMENT" value="1" description="Whether to keep already tracked corners in memory for the next frame or re-detect new ones every time"/>
<define name="FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
<!-- Lucas Kanade optical flow calculation parameters -->
<define name="MAX_TRACK_CORNERS" value="25" description="The maximum amount of corners the Lucas Kanade algorithm is tracking between two frames"/>
@@ -47,6 +49,8 @@
<define name="FAST9_THRESHOLD" value="20" description="FAST9 default threshold"/>
<define name="FAST9_MIN_DISTANCE" value="10" description="The amount of pixels between corners that should be detected"/>
<define name="FAST9_PADDING" value="20" description="The outer border in which no corners will be searched"/>
<define name="FAST9_REGION_DETECT" value="1" description="Whether to detect fast9 corners in regions of interest or the whole image (only works with feature management)"/>
<define name="FAST9_NUM_REGIONS" value="9" description="The number of regions of interest to split the image into"/>
</section>
</doc>
@@ -64,6 +68,7 @@
<dl_setting var="opticflow.median_filter" module="computer_vision/opticflow_module" min="0" step="1" max="1" values="OFF|ON" shortname="median_filter" param="OPTICFLOW_MEDIAN_FILTER"/>
<dl_setting var="opticflow.kalman_filter" module="computer_vision/opticflow_module" min="0" step="1" max="1" values="OFF|ON" shortname="kalman_filter" param="OPTICFLOW_KALMAN_FILTER"/>
<dl_setting var="opticflow.kalman_filter_process_noise" module="computer_vision/opticflow_module" min="0.0001" step="0.0001" max="0.1" shortname="KF_process_noise" param="OPTICFLOW_KALMAN_FILTER_PROCESS_NOISE"/>
<dl_setting var="opticflow.feature_management" module="computer_vision/opticflow_module" min="0" step="1" max="1" values="OFF|ON" shortname="feature_management" param="OPTICFLOW_FEATURE_MANAGEMENT"/>
<!-- Specifically for Lucas Kanade and FAST9 -->
<dl_setting var="opticflow.max_track_corners" module="computer_vision/opticflow_module" min="0" step="1" max="500" shortname="max_trck_corners" param="OPTICFLOW_MAX_TRACK_CORNERS"/>
@@ -74,6 +79,8 @@
<dl_setting var="opticflow.fast9_threshold" module="computer_vision/opticflow_module" min="0" step="1" max="255" shortname="fast9_threshold" param="OPTICFLOW_FAST9_THRESHOLD"/>
<dl_setting var="opticflow.fast9_min_distance" module="computer_vision/opticflow_module" min="0" step="1" max="500" shortname="fast9_min_distance" param="OPTICFLOW_FAST9_MIN_DISTANCE"/>
<dl_setting var="opticflow.fast9_padding" module="computer_vision/opticflow_module" min="0" step="1" max="50" shortname="fast9_padding" param="OPTICFLOW_FAST9_PADDING"/>
<dl_setting var="opticflow.fast9_region_detect" module="computer_vision/opticflow_module" min="0" step="1" max="1" values="OFF|ON" shortname="fast9_region_detect" param="OPTICFLOW_FAST9_REGION_DETECT"/>
<dl_setting var="opticflow.fast9_num_regions" module="computer_vision/opticflow_module" min="1" step="1" max="25" shortname="fast9_num_regions" param="OPTICFLOW_FAST9_NUM_REGIONS"/>
<!-- Changes pyramid level of lucas kanade optical flow. -->
+1
View File
@@ -8,6 +8,7 @@
A telemetry message with the code content is sent when a QR code is detected when qrscan is called.
</description>
<define name="QRCODE_CAMERA" value="front_camera|bottom_camera" description="The V4L2 camera device that is used for searching a QR code"/>
<define name="QRCODE_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
<define name="QRCODE_DRAW_RECTANGLE" value="TRUE|FALSE" description="Whether or not to draw a rectangle around a found QR code"/>
</doc>
+1
View File
@@ -12,6 +12,7 @@
<define name="VIDEO_CAPTURE_CAMERA" value="front_camera|bottom_camera" description="Video device to use"/>
<define name="VIDEO_CAPTURE_PATH" value="/data/video/images" description="Location to save images"/>
<define name="VIDEO_CAPTURE_JPEG_QUALITY" value="99" description="JPEG quality of images"/>
<define name="VIDEO_CAPTURE_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
</doc>
<settings>
+1
View File
@@ -11,6 +11,7 @@
<define name="VIDEO_USB_LOGGER_WIDTH" value="272" description="Width of the images to log"/>
<define name="VIDEO_USB_LOGGER_HEIGHTH" value="272" description="Height of the images to log"/>
<define name="VIDEO_USB_LOGGER_JPEG_WITH_EXIF_HEADER" value="TRUE" description="Whether to store data in the exif header or not"/>
<define name="VIDEO_USB_LOGGER_FPS" value="0" description="The (maximum) frequency to run the calculations at. If zero, it will max out at the camera frame rate"/>
</doc>
<depends>video_thread,pose_history</depends>
<header>
@@ -29,6 +29,11 @@
#include "modules/computer_vision/lib/vision/image.h"
#ifndef COLORFILTER_FPS
#define COLORFILTER_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(COLORFILTER_FPS)
struct video_listener *listener = NULL;
// Filter Settings
@@ -58,5 +63,5 @@ struct image_t *colorfilter_func(struct image_t *img)
void colorfilter_init(void)
{
listener = cv_add_to_device(&COLORFILTER_CAMERA, colorfilter_func);
}
listener = cv_add_to_device(&COLORFILTER_CAMERA, colorfilter_func, COLORFILTER_FPS);
}
+4 -4
View File
@@ -43,7 +43,7 @@ static inline uint32_t timeval_diff(struct timeval *A, struct timeval *B)
}
struct video_listener *cv_add_to_device(struct video_config_t *device, cv_function func)
struct video_listener *cv_add_to_device(struct video_config_t *device, cv_function func, uint16_t fps)
{
// Create a new video listener
struct video_listener *new_listener = malloc(sizeof(struct video_listener));
@@ -53,7 +53,7 @@ struct video_listener *cv_add_to_device(struct video_config_t *device, cv_functi
new_listener->func = func;
new_listener->next = NULL;
new_listener->async = NULL;
new_listener->maximum_fps = 0;
new_listener->maximum_fps = fps;
// Initialise the device that we want our function to use
add_video_device(device);
@@ -79,10 +79,10 @@ struct video_listener *cv_add_to_device(struct video_config_t *device, cv_functi
}
struct video_listener *cv_add_to_device_async(struct video_config_t *device, cv_function func, int nice_level)
struct video_listener *cv_add_to_device_async(struct video_config_t *device, cv_function func, int nice_level, uint16_t fps)
{
// Create a normal listener
struct video_listener *listener = cv_add_to_device(device, func);
struct video_listener *listener = cv_add_to_device(device, func, fps);
// Add asynchronous structure to override default synchronous behavior
listener->async = malloc(sizeof(struct cv_async));
+2 -2
View File
@@ -61,8 +61,8 @@ struct video_listener {
extern bool add_video_device(struct video_config_t *device);
extern struct video_listener *cv_add_to_device(struct video_config_t *device, cv_function func);
extern struct video_listener *cv_add_to_device_async(struct video_config_t *device, cv_function func, int nice_level);
extern struct video_listener *cv_add_to_device(struct video_config_t *device, cv_function func, uint16_t fps);
extern struct video_listener *cv_add_to_device_async(struct video_config_t *device, cv_function func, int nice_level, uint16_t fps);
extern void cv_run_device(struct video_config_t *device, struct image_t *img);
@@ -23,6 +23,11 @@
* Find a colored item and track its geo-location and update a waypoint to it
*/
#ifndef BLOB_LOCATOR_FPS
#define BLOB_LOCATOR_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(BLOB_LOCATOR_FPS)
#include "modules/computer_vision/cv_blob_locator.h"
#include "modules/computer_vision/cv.h"
#include "modules/computer_vision/blob/blob_finder.h"
@@ -53,11 +58,13 @@ volatile bool marker_enabled = false;
volatile bool window_enabled = false;
// Computer vision thread
struct image_t* cv_marker_func(struct image_t *img);
struct image_t* cv_marker_func(struct image_t *img) {
struct image_t *cv_marker_func(struct image_t *img);
struct image_t *cv_marker_func(struct image_t *img)
{
if (!marker_enabled)
if (!marker_enabled) {
return NULL;
}
struct marker_deviation_t m = marker(img, marker_size);
@@ -73,14 +80,16 @@ struct image_t* cv_marker_func(struct image_t *img) {
// Computer vision thread
struct image_t* cv_window_func(struct image_t *img);
struct image_t* cv_window_func(struct image_t *img) {
struct image_t *cv_window_func(struct image_t *img);
struct image_t *cv_window_func(struct image_t *img)
{
if (!window_enabled)
if (!window_enabled) {
return NULL;
}
uint16_t coordinate[2] = {0,0};
uint16_t coordinate[2] = {0, 0};
uint16_t response = 0;
uint32_t integral_image[img->w * img->h];
@@ -88,7 +97,7 @@ struct image_t* cv_window_func(struct image_t *img) {
image_create(&gray, img->w, img->h, IMAGE_GRAYSCALE);
image_to_grayscale(img, &gray);
response = detect_window_sizes( (uint8_t*)gray.buf, (uint32_t)img->w, (uint32_t)img->h, coordinate, integral_image, MODE_BRIGHT);
response = detect_window_sizes((uint8_t *)gray.buf, (uint32_t)img->w, (uint32_t)img->h, coordinate, integral_image, MODE_BRIGHT);
image_free(&gray);
@@ -98,13 +107,13 @@ struct image_t* cv_window_func(struct image_t *img) {
if (response < 92) {
for (int y = 0; y < img->h-1; y++) {
for (int y = 0; y < img->h - 1; y++) {
Img(px, y) = 65;
Img(px+1, y) = 255;
Img(px + 1, y) = 255;
}
for (int x = 0; x < img->w-1; x+=2) {
for (int x = 0; x < img->w - 1; x += 2) {
Img(x, py) = 65;
Img(x+1, py) = 255;
Img(x + 1, py) = 255;
}
uint32_t temp = coordinate[0];
@@ -118,11 +127,13 @@ struct image_t* cv_window_func(struct image_t *img) {
}
struct image_t* cv_blob_locator_func(struct image_t *img);
struct image_t* cv_blob_locator_func(struct image_t *img) {
struct image_t *cv_blob_locator_func(struct image_t *img);
struct image_t *cv_blob_locator_func(struct image_t *img)
{
if (!blob_enabled)
if (!blob_enabled) {
return NULL;
}
// Color Filter
@@ -137,9 +148,9 @@ struct image_t* cv_blob_locator_func(struct image_t *img) {
// Output image
struct image_t dst;
image_create(&dst,
img->w,
img->h,
IMAGE_GRADIENT);
img->w,
img->h,
IMAGE_GRADIENT);
// Labels
uint16_t labels_count = 512;
@@ -152,7 +163,7 @@ struct image_t* cv_blob_locator_func(struct image_t *img) {
int largest_size = 0;
// Find largest
for (int i=0; i<labels_count; i++) {
for (int i = 0; i < labels_count; i++) {
// Only consider large blobs
if (labels[i].pixel_cnt > 50) {
if (labels[i].pixel_cnt > largest_size) {
@@ -162,21 +173,20 @@ struct image_t* cv_blob_locator_func(struct image_t *img) {
}
}
if (largest_id >= 0)
{
uint8_t *p = (uint8_t*) img->buf;
uint16_t* l = (uint16_t*) dst.buf;
for (int y=0;y<dst.h;y++) {
for (int x=0;x<dst.w/2;x++) {
if (l[y*dst.w+x] != 0xffff) {
uint8_t c=0xff;
if (l[y*dst.w+x] == largest_id) {
if (largest_id >= 0) {
uint8_t *p = (uint8_t *) img->buf;
uint16_t *l = (uint16_t *) dst.buf;
for (int y = 0; y < dst.h; y++) {
for (int x = 0; x < dst.w / 2; x++) {
if (l[y * dst.w + x] != 0xffff) {
uint8_t c = 0xff;
if (l[y * dst.w + x] == largest_id) {
c = 0;
}
p[y*dst.w*2+x*4]=c;
p[y*dst.w*2+x*4+1]=0x80;
p[y*dst.w*2+x*4+2]=c;
p[y*dst.w*2+x*4+3]=0x80;
p[y * dst.w * 2 + x * 4] = c;
p[y * dst.w * 2 + x * 4 + 1] = 0x80;
p[y * dst.w * 2 + x * 4 + 2] = c;
p[y * dst.w * 2 + x * 4 + 3] = 0x80;
}
}
}
@@ -185,19 +195,19 @@ struct image_t* cv_blob_locator_func(struct image_t *img) {
uint16_t cgx = labels[largest_id].x_sum / labels[largest_id].pixel_cnt * 2;
uint16_t cgy = labels[largest_id].y_sum / labels[largest_id].pixel_cnt;
if ((cgx > 1) && (cgx < (dst.w-2)) &&
(cgy > 1) && (cgy < (dst.h-2))
) {
p[cgy*dst.w*2+cgx*2-4] = 0xff;
p[cgy*dst.w*2+cgx*2-2] = 0x00;
p[cgy*dst.w*2+cgx*2] = 0xff;
p[cgy*dst.w*2+cgx*2+2] = 0x00;
p[cgy*dst.w*2+cgx*2+4] = 0xff;
p[cgy*dst.w*2+cgx*2+6] = 0x00;
p[(cgy-1)*dst.w*2+cgx*2] = 0xff;
p[(cgy-1)*dst.w*2+cgx*2+2] = 0x00;
p[(cgy+1)*dst.w*2+cgx*2] = 0xff;
p[(cgy+1)*dst.w*2+cgx*2+2] = 0x00;
if ((cgx > 1) && (cgx < (dst.w - 2)) &&
(cgy > 1) && (cgy < (dst.h - 2))
) {
p[cgy * dst.w * 2 + cgx * 2 - 4] = 0xff;
p[cgy * dst.w * 2 + cgx * 2 - 2] = 0x00;
p[cgy * dst.w * 2 + cgx * 2] = 0xff;
p[cgy * dst.w * 2 + cgx * 2 + 2] = 0x00;
p[cgy * dst.w * 2 + cgx * 2 + 4] = 0xff;
p[cgy * dst.w * 2 + cgx * 2 + 6] = 0x00;
p[(cgy - 1)*dst.w * 2 + cgx * 2] = 0xff;
p[(cgy - 1)*dst.w * 2 + cgx * 2 + 2] = 0x00;
p[(cgy + 1)*dst.w * 2 + cgx * 2] = 0xff;
p[(cgy + 1)*dst.w * 2 + cgx * 2 + 2] = 0x00;
}
@@ -217,7 +227,8 @@ struct image_t* cv_blob_locator_func(struct image_t *img) {
#include <stdio.h>
void cv_blob_locator_init(void) {
void cv_blob_locator_init(void)
{
// Red board in sunlight
color_lum_min = 100;
color_lum_max = 200;
@@ -238,40 +249,41 @@ void cv_blob_locator_init(void) {
georeference_init();
cv_add_to_device(&BLOB_LOCATOR_CAMERA, cv_blob_locator_func);
cv_add_to_device(&BLOB_LOCATOR_CAMERA, cv_marker_func);
cv_add_to_device(&BLOB_LOCATOR_CAMERA, cv_window_func);
cv_add_to_device(&BLOB_LOCATOR_CAMERA, cv_blob_locator_func, BLOB_LOCATOR_FPS);
cv_add_to_device(&BLOB_LOCATOR_CAMERA, cv_marker_func, BLOB_LOCATOR_FPS);
cv_add_to_device(&BLOB_LOCATOR_CAMERA, cv_window_func, BLOB_LOCATOR_FPS);
}
void cv_blob_locator_periodic(void) {
void cv_blob_locator_periodic(void)
{
}
void cv_blob_locator_event(void) {
switch (cv_blob_locator_type)
{
case 1:
blob_enabled = true;
marker_enabled = false;
window_enabled = false;
break;
case 2:
blob_enabled = false;
marker_enabled = true;
window_enabled = false;
break;
case 3:
blob_enabled = false;
marker_enabled = false;
window_enabled = true;
break;
default:
blob_enabled = false;
marker_enabled = false;
window_enabled = false;
break;
void cv_blob_locator_event(void)
{
switch (cv_blob_locator_type) {
case 1:
blob_enabled = true;
marker_enabled = false;
window_enabled = false;
break;
case 2:
blob_enabled = false;
marker_enabled = true;
window_enabled = false;
break;
case 3:
blob_enabled = false;
marker_enabled = false;
window_enabled = true;
break;
default:
blob_enabled = false;
marker_enabled = false;
window_enabled = false;
break;
}
if (blob_locator != 0) {
// CV thread has results: import
@@ -282,11 +294,11 @@ void cv_blob_locator_event(void) {
uint16_t y = temp & 0x0000ffff;
temp = temp >> 16;
uint16_t x = temp & 0x0000ffff;
printf("Found %d %d \n",x,y);
printf("Found %d %d \n", x, y);
struct camera_frame_t cam;
cam.px = x/2;
cam.py = y/2;
cam.px = x / 2;
cam.py = y / 2;
cam.f = 400;
cam.h = 240;
cam.w = 320;
@@ -295,31 +307,36 @@ void cv_blob_locator_event(void) {
georeference_project(&cam, WP_p1);
#endif
#ifdef WP_CAM
georeference_filter(FALSE,WP_CAM, geofilter_length);
georeference_filter(FALSE, WP_CAM, geofilter_length);
#endif
}
}
extern void cv_blob_locator_start(void) {
extern void cv_blob_locator_start(void)
{
georeference_init();
}
extern void cv_blob_locator_stop(void) {
extern void cv_blob_locator_stop(void)
{
}
void start_vision(void) {
void start_vision(void)
{
georeference_init();
record_video = 1;
cv_blob_locator_type = 3;
}
void start_vision_land(void) {
void start_vision_land(void)
{
georeference_init();
record_video = 1;
cv_blob_locator_type = 2;
}
void stop_vision(void) {
void stop_vision(void)
{
georeference_init();
record_video = 0;
cv_blob_locator_type = 0;
@@ -27,16 +27,19 @@
#include "modules/computer_vision/cv_opencvdemo.h"
#include "modules/computer_vision/opencv_example.h"
#ifndef OPENCVDEMO_FPS
#define OPENCVDEMO_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(OPENCVDEMO_FPS)
// Function
struct image_t* opencv_func(struct image_t* img);
struct image_t* opencv_func(struct image_t* img)
struct image_t *opencv_func(struct image_t *img);
struct image_t *opencv_func(struct image_t *img)
{
if (img->type == IMAGE_YUV422)
{
if (img->type == IMAGE_YUV422) {
// Call OpenCV (C++ from paparazzi C function)
opencv_example((char*) img->buf, img->w, img->h);
opencv_example((char *) img->buf, img->w, img->h);
}
// opencv_example(NULL, 10,10);
@@ -46,6 +49,6 @@ struct image_t* opencv_func(struct image_t* img)
void opencvdemo_init(void)
{
cv_add_to_device(&OPENCVDEMO_CAMERA, opencv_func);
cv_add_to_device(&OPENCVDEMO_CAMERA, opencv_func, OPENCVDEMO_FPS);
}
@@ -23,6 +23,11 @@
*
*/
#ifndef DETECT_CONTOUR_FPS
#define DETECT_CONTOUR_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(DETECT_CONTOUR_FPS)
#include "modules/computer_vision/cv.h"
#include "modules/computer_vision/detect_contour.h"
#include "modules/computer_vision/opencv_contour.h"
@@ -41,7 +46,7 @@ struct image_t *contour_func(struct image_t *img)
void detect_contour_init(void)
{
cv_add_to_device(&DETECT_CONTOUR_CAMERA, contour_func);
cv_add_to_device(&DETECT_CONTOUR_CAMERA, contour_func, DETECT_CONTOUR_FPS);
// in the mavlab, bright
cont_thres.lower_y = 16; cont_thres.lower_u = 135; cont_thres.lower_v = 80;
cont_thres.upper_y = 100; cont_thres.upper_u = 175; cont_thres.upper_v = 165;
@@ -27,16 +27,21 @@
#define RES 100
#define N_WINDOW_SIZES 1
#ifndef DETECT_WINDOW_FPS
#define DETECT_WINDOW_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(DETECT_WINDOW_FPS)
#include "cv.h"
#include "detect_window.h"
#include <stdio.h>
void detect_window_init(void)
{
cv_add_to_device(&BLOB_LOCATOR_CAMERA, detect_window);
cv_add_to_device(&DETECT_WINDOW_CAMERA, detect_window, DETECT_WINDOW_FPS);
}
struct image_t* detect_window(struct image_t *img)
struct image_t *detect_window(struct image_t *img)
{
uint16_t coordinate[2];
@@ -52,7 +52,7 @@ static void fast_make_offsets(int32_t *pixel, uint16_t row_stride, uint8_t pixel
void fast9_detect(struct image_t *img, uint8_t threshold, uint16_t min_dist, uint16_t x_padding, uint16_t y_padding, uint16_t *num_corners, uint16_t *ret_corners_length, struct point_t **ret_corners, uint16_t *roi)
{
uint16_t corner_cnt = 0;
uint16_t corner_cnt = *num_corners;
int pixel[16];
int16_t i;
uint16_t x, y, x_min, x_max, y_min, x_start, x_end, y_start, y_end;
@@ -63,6 +63,14 @@ void fast9_detect(struct image_t *img, uint8_t threshold, uint16_t min_dist, uin
pixel_size = 2;
}
// Padding less than min_dist could cause overflow on some comparisons below.
if (x_padding < min_dist) {
x_padding = min_dist;
}
if (y_padding < min_dist) {
y_padding = min_dist;
}
if (!roi) {
x_start = 3 + x_padding;
y_start = 3 + y_padding;
@@ -82,7 +90,9 @@ void fast9_detect(struct image_t *img, uint8_t threshold, uint16_t min_dist, uin
// Go through all the pixels (minus the borders and inside the requested roi)
for (y = y_start; y < y_end; y++) {
if (min_dist > 0) { y_min = y - min_dist; }
if (min_dist > 0) {
y_min = y - min_dist;
}
for (x = x_start; x < x_end; x++) {
// First check if we aren't in range vertical (TODO: fix less intensive way)
@@ -103,6 +113,15 @@ void fast9_detect(struct image_t *img, uint8_t threshold, uint16_t min_dist, uin
if ((*ret_corners)[i].y < y_min) {
break;
}
/*
// If detecting with already existing corners gives too much overlap uncomment this comparison instead of the one above.
// But, it will make the detection more time consuming
// TODO: maybe sort the corners before calling...
if(ret_corners[i].y < y_min || ret_corners[i].y > y_max){
i--;
continue;
}
*/
if (x_min < (*ret_corners)[i].x && (*ret_corners)[i].x < x_max) {
need_skip = 1;
@@ -29,6 +29,7 @@
#include "std.h"
#include <sys/time.h>
#include <state.h>
/* The different type of images we currently support */
enum image_type {
@@ -44,6 +45,7 @@ struct image_t {
uint16_t w; ///< Image width
uint16_t h; ///< Image height
struct timeval ts; ///< The timestamp of creation
struct FloatEulers *eulerAngles; ///< Pointer to the Euler Angles
uint32_t pprz_ts; ///< The timestamp in us since system startup
uint8_t buf_idx; ///< Buffer index for V4L2 freeing
@@ -55,6 +57,9 @@ struct image_t {
struct point_t {
uint32_t x; ///< The x coordinate of the point
uint32_t y; ///< The y coordinate of the point
uint16_t count; ///< Number of times the point has been tracked successfully
uint16_t x_sub; ///< The x subpixel coordinate of the point
uint16_t y_sub; ///< The y subpixel coordinate of the point
};
/* Vector structure for point differences */
@@ -76,8 +76,7 @@ struct flow_t *opticFlowLK(struct image_t *new_img, struct image_t *old_img, str
{
// if no pyramids, use the old code:
if(pyramid_level == 0)
{
if (pyramid_level == 0) {
// use the old code in this case:
return opticFlowLK_flat(new_img, old_img, points, points_cnt, half_window_size, subpixel_factor, max_iterations, step_threshold, max_points);
}
@@ -87,6 +86,7 @@ struct flow_t *opticFlowLK(struct image_t *new_img, struct image_t *old_img, str
// Determine patch sizes and initialize neighborhoods
uint16_t patch_size = 2 * half_window_size + 1;
// TODO: Feature management shows that this threshold rejects corners maybe too often, maybe another formula could be chosen
uint32_t error_threshold = (25 * 25) * (patch_size * patch_size);
uint16_t padded_patch_size = patch_size + 2;
uint8_t border_size = padded_patch_size / 2 + 2; // amount of padding added to images
@@ -251,7 +251,8 @@ struct flow_t *opticFlowLK(struct image_t *new_img, struct image_t *old_img, str
* @return The vectors from the original *points in subpixels
*/
struct flow_t *opticFlowLK_flat(struct image_t *new_img, struct image_t *old_img, struct point_t *points, uint16_t *points_cnt,
uint16_t half_window_size, uint16_t subpixel_factor, uint8_t max_iterations, uint8_t step_threshold, uint16_t max_points) {
uint16_t half_window_size, uint16_t subpixel_factor, uint8_t max_iterations, uint8_t step_threshold, uint16_t max_points)
{
// A straightforward one-level implementation of Lucas-Kanade.
// For all points:
// (1) determine the subpixel neighborhood in the old image
@@ -271,7 +272,7 @@ struct flow_t *opticFlowLK_flat(struct image_t *new_img, struct image_t *old_img
// determine patch sizes and initialize neighborhoods
uint16_t patch_size = 2 * half_window_size;
uint32_t error_threshold = (25 * 25) *(patch_size *patch_size);
uint32_t error_threshold = (25 * 25) * (patch_size * patch_size);
uint16_t padded_patch_size = patch_size + 2;
// Create the window images
@@ -118,7 +118,7 @@ PRINT_CONFIG_VAR(OPTICFLOW_MAX_ITERATIONS)
PRINT_CONFIG_VAR(OPTICFLOW_THRESHOLD_VEC)
#ifndef OPTICFLOW_PYRAMID_LEVEL
#define OPTICFLOW_PYRAMID_LEVEL 0
#define OPTICFLOW_PYRAMID_LEVEL 2
#endif
PRINT_CONFIG_VAR(OPTICFLOW_PYRAMID_LEVEL)
@@ -185,6 +185,21 @@ PRINT_CONFIG_VAR(OPTICFLOW_KALMAN_FILTER)
#endif
PRINT_CONFIG_VAR(OPTICFLOW_KALMAN_FILTER_PROCESS_NOISE)
#ifndef OPTICFLOW_FEATURE_MANAGEMENT
#define OPTICFLOW_FEATURE_MANAGEMENT 1
#endif
PRINT_CONFIG_VAR(OPTICFLOW_FEATURE_MANAGEMENT)
#ifndef OPTICFLOW_FAST9_REGION_DETECT
#define OPTICFLOW_FAST9_REGION_DETECT 1
#endif
PRINT_CONFIG_VAR(OPTICFLOW_FAST9_REGION_DETECT)
#ifndef OPTICFLOW_FAST9_NUM_REGIONS
#define OPTICFLOW_FAST9_NUM_REGIONS 9
#endif
PRINT_CONFIG_VAR(OPTICFLOW_FAST9_NUM_REGIONS)
//Include median filter
#include "filters/median_filter.h"
struct MedianFilterInt vel_x_filt, vel_y_filt;
@@ -193,6 +208,7 @@ struct MedianFilterInt vel_x_filt, vel_y_filt;
/* Functions only used here */
static uint32_t timeval_diff(struct timeval *starttime, struct timeval *finishtime);
static int cmp_flow(const void *a, const void *b);
static int cmp_array(const void *a, const void *b);
@@ -219,6 +235,9 @@ void opticflow_calc_init(struct opticflow_t *opticflow)
opticflow->median_filter = OPTICFLOW_MEDIAN_FILTER;
opticflow->kalman_filter = OPTICFLOW_KALMAN_FILTER;
opticflow->kalman_filter_process_noise = OPTICFLOW_KALMAN_FILTER_PROCESS_NOISE;
opticflow->feature_management = OPTICFLOW_FEATURE_MANAGEMENT;
opticflow->fast9_region_detect = OPTICFLOW_FAST9_REGION_DETECT;
opticflow->fast9_num_regions = OPTICFLOW_FAST9_NUM_REGIONS;
opticflow->fast9_adaptive = OPTICFLOW_FAST9_ADAPTIVE;
opticflow->fast9_threshold = OPTICFLOW_FAST9_THRESHOLD;
@@ -238,17 +257,17 @@ void calc_fast9_lukas_kanade(struct opticflow_t *opticflow, struct opticflow_sta
struct opticflow_result_t *result)
{
if (opticflow->just_switched_method) {
// Create the image buffers
image_create(&opticflow->img_gray, img->w, img->h, IMAGE_GRAYSCALE);
image_create(&opticflow->prev_img_gray, img->w, img->h, IMAGE_GRAYSCALE);
// Create the image buffers
image_create(&opticflow->img_gray, img->w, img->h, IMAGE_GRAYSCALE);
image_create(&opticflow->prev_img_gray, img->w, img->h, IMAGE_GRAYSCALE);
// Set the previous values
opticflow->got_first_img = false;
FLOAT_RATES_ZERO(opticflow->prev_rates);
// Set the previous values
opticflow->got_first_img = false;
FLOAT_RATES_ZERO(opticflow->prev_rates);
// Init median filters with zeros
init_median_filter(&vel_x_filt);
init_median_filter(&vel_y_filt);
// Init median filters with zeros
init_median_filter(&vel_x_filt);
init_median_filter(&vel_y_filt);
}
// variables for size_divergence:
@@ -276,24 +295,74 @@ void calc_fast9_lukas_kanade(struct opticflow_t *opticflow, struct opticflow_sta
// Corner detection
// *************************************************************************************
// FAST corner detection
// TODO: There is something wrong with fast9_detect destabilizing FPS. This problem is reduced by setting min_distance
// to 0 (see defines); however, a more permanent solution should be considered
// last parameter (for ROI detection) set to NULL because feature management is not implemented yet.
fast9_detect(img, opticflow->fast9_threshold, opticflow->fast9_min_distance,
opticflow->fast9_padding, opticflow->fast9_padding, &result->corner_cnt,
&opticflow->fast9_rsize,
&opticflow->fast9_ret_corners,
NULL);
// if feature_management is selected and tracked corners drop below a threshold, redetect
if ((opticflow->feature_management) && (result->corner_cnt < opticflow->max_track_corners / 2)) {
// no need for "per region" re-detection when there are no previous corners
if ((!opticflow->fast9_region_detect) || (result->corner_cnt == 0)) {
fast9_detect(&opticflow->prev_img_gray, opticflow->fast9_threshold, opticflow->fast9_min_distance,
opticflow->fast9_padding, opticflow->fast9_padding, &result->corner_cnt,
&opticflow->fast9_rsize,
&opticflow->fast9_ret_corners,
NULL);
} else {
// allocating memory and initializing the 2d array that holds the number of corners per region and its index (for the sorting)
uint16_t **region_count = malloc(opticflow->fast9_num_regions * sizeof(uint16_t *));
for (uint16_t i = 0; i < opticflow->fast9_num_regions ; i++) {
region_count[i] = malloc(sizeof(uint16_t) * 2);
region_count[i][0] = 0;
region_count[i][1] = i;
}
for (uint16_t i = 0; i < result->corner_cnt; i++) {
region_count[(opticflow->fast9_ret_corners[i].x / (img->w / (uint8_t)sqrt(opticflow->fast9_num_regions))
+ opticflow->fast9_ret_corners[i].y / (img->h / (uint8_t)sqrt(opticflow->fast9_num_regions)) * (uint8_t)sqrt(opticflow->fast9_num_regions))][0]++;
}
// Adaptive threshold
if (opticflow->fast9_adaptive) {
// Decrease and increase the threshold based on previous values
if (result->corner_cnt < 40
&& opticflow->fast9_threshold > FAST9_LOW_THRESHOLD) { // TODO: Replace 40 with OPTICFLOW_MAX_TRACK_CORNERS / 2
opticflow->fast9_threshold--;
} else if (result->corner_cnt > OPTICFLOW_MAX_TRACK_CORNERS * 2 && opticflow->fast9_threshold < FAST9_HIGH_THRESHOLD) {
opticflow->fast9_threshold++;
//sorting region_count array according to first column (number of corners).
qsort(region_count, opticflow->fast9_num_regions, sizeof(region_count[0]), cmp_array);
// Detecting corners from the region with the less to the one with the most, until a desired total is reached.
for (uint16_t i = 0; i < opticflow->fast9_num_regions && result->corner_cnt < 2 * opticflow->max_track_corners ; i++) {
// Find the boundaries of the region of interest
uint16_t *roi = malloc(4 * sizeof(uint16_t));
roi[0] = (region_count[i][1] % (uint8_t)sqrt(opticflow->fast9_num_regions)) * (img->w / (uint8_t)sqrt(opticflow->fast9_num_regions));
roi[1] = (region_count[i][1] / (uint8_t)sqrt(opticflow->fast9_num_regions)) * (img->h / (uint8_t)sqrt(opticflow->fast9_num_regions));
roi[2] = roi[0] + (img->w / (uint8_t)sqrt(opticflow->fast9_num_regions));
roi[3] = roi[1] + (img->h / (uint8_t)sqrt(opticflow->fast9_num_regions));
fast9_detect(&opticflow->prev_img_gray, opticflow->fast9_threshold, opticflow->fast9_min_distance,
opticflow->fast9_padding, opticflow->fast9_padding, &result->corner_cnt,
&opticflow->fast9_rsize,
&opticflow->fast9_ret_corners,
roi);
free(roi);
}
for (uint16_t i = 0; i < opticflow->fast9_num_regions; i++) {
free(region_count[i]);
}
free(region_count);
}
} else if (!opticflow->feature_management) {
// needs to be set to 0 because result is now static
result->corner_cnt = 0;
// FAST corner detection
// TODO: There is something wrong with fast9_detect destabilizing FPS. This problem is reduced by setting min_distance
// to 0 (see defines); however, a more permanent solution should be considered
fast9_detect(&opticflow->prev_img_gray, opticflow->fast9_threshold, opticflow->fast9_min_distance,
opticflow->fast9_padding, opticflow->fast9_padding, &result->corner_cnt,
&opticflow->fast9_rsize,
&opticflow->fast9_ret_corners,
NULL);
// Adaptive threshold
if (opticflow->fast9_adaptive) {
// Decrease and increase the threshold based on previous values
if (result->corner_cnt < 40
&& opticflow->fast9_threshold > FAST9_LOW_THRESHOLD) { // TODO: Replace 40 with OPTICFLOW_MAX_TRACK_CORNERS / 2
opticflow->fast9_threshold--;
} else if (result->corner_cnt > OPTICFLOW_MAX_TRACK_CORNERS * 2 && opticflow->fast9_threshold < FAST9_HIGH_THRESHOLD) {
opticflow->fast9_threshold++;
}
}
}
@@ -303,6 +372,8 @@ void calc_fast9_lukas_kanade(struct opticflow_t *opticflow, struct opticflow_sta
// Check if we found some corners to track
if (result->corner_cnt < 1) {
// Clear the result otherwise the previous values will be returned for this frame too
memset(result, 0, sizeof(struct opticflow_result_t));
image_copy(&opticflow->img_gray, &opticflow->prev_img_gray);
return;
}
@@ -319,7 +390,6 @@ void calc_fast9_lukas_kanade(struct opticflow_t *opticflow, struct opticflow_sta
opticflow->threshold_vec, opticflow->max_track_corners, opticflow->pyramid_level);
#if OPTICFLOW_SHOW_FLOW
printf("show: n tracked = %d\n", result->tracked_cnt);
image_show_flow(img, vectors, result->tracked_cnt, opticflow->subpixel_factor);
#endif
@@ -423,6 +493,18 @@ void calc_fast9_lukas_kanade(struct opticflow_t *opticflow, struct opticflow_sta
// *************************************************************************************
// Next Loop Preparation
// *************************************************************************************
if (opticflow->feature_management) {
result->corner_cnt = result->tracked_cnt;
//get the new positions of the corners and the "residual" subpixel positions
for (uint16_t i = 0; i < result->tracked_cnt; i++) {
opticflow->fast9_ret_corners[i].x = (uint32_t)((vectors[i].pos.x + (float)vectors[i].flow_x) / opticflow->subpixel_factor);
opticflow->fast9_ret_corners[i].y = (uint32_t)((vectors[i].pos.y + (float)vectors[i].flow_y) / opticflow->subpixel_factor);
opticflow->fast9_ret_corners[i].x_sub = (uint16_t)((vectors[i].pos.x + vectors[i].flow_x) % opticflow->subpixel_factor);
opticflow->fast9_ret_corners[i].y_sub = (uint16_t)((vectors[i].pos.y + vectors[i].flow_y) % opticflow->subpixel_factor);
opticflow->fast9_ret_corners[i].count = vectors[i].pos.count;
}
}
free(vectors);
image_switch(&opticflow->img_gray, &opticflow->prev_img_gray);
}
@@ -614,6 +696,8 @@ void opticflow_calc_frame(struct opticflow_t *opticflow, struct opticflow_state_
if (switch_counter != opticflow->method) {
opticflow->just_switched_method = true;
switch_counter = opticflow->method;
// Clear the static result
memset(result, 0, sizeof(struct opticflow_result_t));
} else {
opticflow->just_switched_method = false;
}
@@ -762,4 +846,17 @@ static int cmp_flow(const void *a, const void *b)
b_p->flow_y);
}
/**
 * qsort comparator for rows of a 2D (uint16_t) array, ordering rows by the
 * value in their first column (ascending).
 * @param[in] *a Pointer to the first row pointer (effectively const uint16_t **)
 * @param[in] *b Pointer to the second row pointer (effectively const uint16_t **)
 * @return Negative if a[0] < b[0], 0 if a[0] == b[0] and positive if a[0] > b[0]
 */
static int cmp_array(const void *a, const void *b)
{
  // qsort hands us pointers to the array elements, which are themselves row pointers
  const uint16_t *const *row_a = a;
  const uint16_t *const *row_b = b;
  // uint16_t operands promote to int, so the subtraction cannot overflow
  return (*row_a)[0] - (*row_b)[0];
}
@@ -70,6 +70,9 @@ struct opticflow_t {
uint16_t fast9_rsize; ///< Amount of corners allocated
struct point_t *fast9_ret_corners; ///< Corners
bool feature_management; ///< Decides whether to keep track corners in memory for the next frame instead of re-detecting every time
bool fast9_region_detect; ///< Decides whether to detect fast9 corners in specific regions of interest or the whole image (only for feature management)
uint8_t fast9_num_regions; ///< The number of regions of interest the image is split into
};
@@ -61,6 +61,10 @@ PRINT_CONFIG_VAR(OPTICFLOW_BODY_TO_IMU_ID)
#endif
PRINT_CONFIG_VAR(OPTICFLOW_SEND_ABI_ID)
#ifndef OPTICFLOW_FPS
#define OPTICFLOW_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(OPTICFLOW_FPS)
/* The main opticflow variables */
struct opticflow_t opticflow; ///< Opticflow calculations
@@ -126,7 +130,7 @@ void opticflow_module_init(void)
opticflow_got_result = false;
opticflow_calc_init(&opticflow);
cv_add_to_device(&OPTICFLOW_CAMERA, opticflow_module_calc);
cv_add_to_device(&OPTICFLOW_CAMERA, opticflow_module_calc, OPTICFLOW_FPS);
#if PERIODIC_TELEMETRY
register_periodic_telemetry(DefaultPeriodic, PPRZ_MSG_ID_OPTIC_FLOW_EST, opticflow_telem_send);
@@ -183,7 +187,7 @@ struct image_t *opticflow_module_calc(struct image_t *img)
temp_state.rates = pose.rates;
// Do the optical flow calculation
struct opticflow_result_t temp_result = {}; // new initialization
static struct opticflow_result_t temp_result = {}; // static so that the number of corners is kept between frames
opticflow_calc_frame(&opticflow, &temp_state, img, &temp_result);
// Copy the result if finished
@@ -34,10 +34,15 @@
#endif
bool drawRectangleAroundQRCode = QRCODE_DRAW_RECTANGLE;
#ifndef QRCODE_FPS
#define QRCODE_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(QRCODE_FPS)
/**
 * Initialize the QR code scanner module.
 *
 * Registers qrscan with the computer vision pipeline on QRCODE_CAMERA,
 * rate-limited to QRCODE_FPS (0 means run at the camera frame rate).
 */
void qrcode_init(void)
{
  // Add qrscan to the list of image processing tasks in video_thread.
  // Only the 3-argument form is kept: the old 2-argument call was a merge
  // leftover that would have registered the listener twice (and no longer
  // matches the updated cv_add_to_device signature taking an FPS cap).
  cv_add_to_device(&QRCODE_CAMERA, qrscan, QRCODE_FPS);
}
// Telemetry
@@ -46,6 +46,11 @@
#define VIDEO_CAPTURE_JPEG_QUALITY 99
#endif
#ifndef VIDEO_CAPTURE_FPS
#define VIDEO_CAPTURE_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(VIDEO_CAPTURE_FPS)
// Module settings
bool video_capture_take_shot = false;
int video_capture_index = 0;
@@ -66,7 +71,7 @@ void video_capture_init(void)
}
// Add function to computer vision pipeline
cv_add_to_device(&VIDEO_CAPTURE_CAMERA, video_capture_func);
cv_add_to_device(&VIDEO_CAPTURE_CAMERA, video_capture_func, VIDEO_CAPTURE_FPS);
}
@@ -43,6 +43,11 @@
#define VIDEO_USB_LOGGER_PATH /data/video/usb
#endif
#ifndef VIDEO_USB_LOGGER_FPS
#define VIDEO_USB_LOGGER_FPS 0 ///< Default FPS (zero means run at camera fps)
#endif
PRINT_CONFIG_VAR(VIDEO_USB_LOGGER_FPS)
/** The file pointer */
static FILE *video_usb_logger = NULL;
struct image_t img_jpeg_global;
@@ -143,7 +148,7 @@ void video_usb_logger_start(void)
}
// Subscribe to a camera
cv_add_to_device(&VIDEO_USB_LOGGER_CAMERA, log_image);
cv_add_to_device(&VIDEO_USB_LOGGER_CAMERA, log_image, VIDEO_USB_LOGGER_FPS);
}
/** Stop the logger an nicely close the file */
@@ -246,15 +246,13 @@ void viewvideo_init(void)
#ifdef VIEWVIDEO_CAMERA
struct video_listener *listener1 = cv_add_to_device_async(&VIEWVIDEO_CAMERA, viewvideo_function1,
VIEWVIDEO_NICE_LEVEL);
listener1->maximum_fps = VIEWVIDEO_FPS;
fprintf(stderr, "[viewvideo] Added asynchronous video streamer lister for CAMERA1\n");
VIEWVIDEO_NICE_LEVEL, VIEWVIDEO_FPS);
fprintf(stderr, "[viewvideo] Added asynchronous video streamer listener for CAMERA1 at %u FPS \n", VIEWVIDEO_FPS);
#endif
#ifdef VIEWVIDEO_CAMERA2
struct video_listener *listener2 = cv_add_to_device_async(&VIEWVIDEO_CAMERA2, viewvideo_function2,
VIEWVIDEO_NICE_LEVEL);
listener2->maximum_fps = VIEWVIDEO_FPS;
fprintf(stderr, "[viewvideo] Added asynchronous video streamer lister for CAMERA2\n");
VIEWVIDEO_NICE_LEVEL, VIEWVIDEO_FPS);
fprintf(stderr, "[viewvideo] Added asynchronous video streamer listener for CAMERA2 at %u FPS \n", VIEWVIDEO_FPS);
#endif
}