From c5c9770d3a389ee574c77c797ffcea00dfb56b21 Mon Sep 17 00:00:00 2001
From: "Kwabena W. Agyeman"
Date: Wed, 4 Mar 2020 23:00:59 -0800
Subject: [PATCH] Improve rotation_correction()

* Optimized code to make it run slightly faster.
* Added fov argument to control the zoom effect better.
* Added perspective correction through 4 point correspondence.
---
 .../perspective_and_rotation_correction.py   |  71 ++++++
 .../perspective_correction.py                |  39 ++++
 .../04-Image-Filters/rotation_correction.py  |  18 +-
 src/omv/img/apriltag.c                       | 217 +++++++++++-------
 src/omv/img/imlib.h                          |   2 +-
 src/omv/img/phasecorrelation.c               |   2 +-
 src/omv/py/py_helper.c                       |  32 +++
 src/omv/py/py_helper.h                       |   2 +
 src/omv/py/py_image.c                        |  28 ++-
 src/omv/py/qstrdefsomv.h                     |   4 +-
 10 files changed, 308 insertions(+), 107 deletions(-)
 create mode 100644 scripts/examples/04-Image-Filters/perspective_and_rotation_correction.py
 create mode 100644 scripts/examples/04-Image-Filters/perspective_correction.py

diff --git a/scripts/examples/04-Image-Filters/perspective_and_rotation_correction.py b/scripts/examples/04-Image-Filters/perspective_and_rotation_correction.py
new file mode 100644
index 000000000..b89117d40
--- /dev/null
+++ b/scripts/examples/04-Image-Filters/perspective_and_rotation_correction.py
@@ -0,0 +1,71 @@
+# Perspective and Rotation Correction
+#
+# This example shows off how to use rotation_corr() to first correct for
+# perspective distortion and then rotate the corrected image in 3D space
+# afterwards to handle movement.
+
+import sensor, image, time
+
+sensor.reset()
+sensor.set_pixformat(sensor.RGB565)
+sensor.set_framesize(sensor.QVGA)
+sensor.skip_frames(time = 2000)
+clock = time.clock()
+
+# The image will be warped such that the points below map to these new corners:
+#
+# (0, 0)
+# (w-1, 0)
+# (w-1, h-1)
+# (0, h-1)
+#
+# Try setting the points below to the corners of a quadrilateral
+# (in clockwise order) in the field-of-view. You can get points
+# on the image by clicking and dragging on the frame buffer and
+# recording the values shown in the histogram widget.
+
+w = sensor.width()
+h = sensor.height()
+
+TARGET_POINTS = [(0, 0),     # (x, y) CHANGE ME!
+                 (w-1, 0),   # (x, y) CHANGE ME!
+                 (w-1, h-1), # (x, y) CHANGE ME!
+                 (0, h-1)]   # (x, y) CHANGE ME!
+
+# Degrees per frame to rotate by...
+X_ROTATION_DEGREE_RATE = 5
+Y_ROTATION_DEGREE_RATE = 0.5
+Z_ROTATION_DEGREE_RATE = 0
+X_OFFSET = 0
+Y_OFFSET = 0
+
+ZOOM_AMOUNT = 1 # Lower zooms out - Higher zooms in.
+FOV_WINDOW = 25 # Between 0 and 180. Represents the field-of-view of the scene
+                # window when rotating the image in 3D space. Values closer
+                # to zero move the window away from the image being rotated,
+                # so lines stay straighter. Larger values move the window
+                # closer to the image, which increases perspective distortion
+                # and can even make the image in 3D intersect the scene
+                # window.
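For intuition, fov only sets how far the virtual scene window sits from the image plane; it does not crop the image. The firmware derives that distance as z = (sqrt(w*w + h*h) / 2) / tan(fov / 2) (see the apriltag.c change below). A standalone Python sketch of the same relationship, using an illustrative helper name:

    import math

    def scene_window_distance(w, h, fov_degrees):
        # Mirrors the firmware: z = (sqrt(w^2 + h^2) / 2) / tan(fov / 2).
        # Smaller fov -> larger z -> weaker perspective, straighter lines.
        return (math.sqrt((w * w) + (h * h)) / 2) / math.tan(math.radians(fov_degrees) / 2)

    print(scene_window_distance(320, 240, 25))   # far window, mild distortion
    print(scene_window_distance(320, 240, 120))  # near window, strong distortion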
+
+x_rotation_counter = 0
+y_rotation_counter = 0
+z_rotation_counter = 0
+
+while(True):
+    clock.tick()
+
+    img = sensor.snapshot().rotation_corr(x_rotation = x_rotation_counter, \
+                                          y_rotation = y_rotation_counter, \
+                                          z_rotation = z_rotation_counter, \
+                                          x_translation = X_OFFSET, \
+                                          y_translation = Y_OFFSET, \
+                                          zoom = ZOOM_AMOUNT, \
+                                          fov = FOV_WINDOW, \
+                                          corners = TARGET_POINTS)
+
+    x_rotation_counter += X_ROTATION_DEGREE_RATE
+    y_rotation_counter += Y_ROTATION_DEGREE_RATE
+    z_rotation_counter += Z_ROTATION_DEGREE_RATE
+
+    print(clock.fps())
diff --git a/scripts/examples/04-Image-Filters/perspective_correction.py b/scripts/examples/04-Image-Filters/perspective_correction.py
new file mode 100644
index 000000000..a8f2aa58d
--- /dev/null
+++ b/scripts/examples/04-Image-Filters/perspective_correction.py
@@ -0,0 +1,39 @@
+# Perspective Correction
+#
+# This example shows off how to use rotation_corr() to fix perspective
+# issues related to how your OpenMV Cam is mounted.
+
+import sensor, image, time
+
+sensor.reset()
+sensor.set_pixformat(sensor.RGB565)
+sensor.set_framesize(sensor.QVGA)
+sensor.skip_frames(time = 2000)
+clock = time.clock()
+
+# The image will be warped such that the points below map to these new corners:
+#
+# (0, 0)
+# (w-1, 0)
+# (w-1, h-1)
+# (0, h-1)
+#
+# Try setting the points below to the corners of a quadrilateral
+# (in clockwise order) in the field-of-view. You can get points
+# on the image by clicking and dragging on the frame buffer and
+# recording the values shown in the histogram widget.
+
+w = sensor.width()
+h = sensor.height()
+
+TARGET_POINTS = [(0, 0),     # (x, y) CHANGE ME!
+                 (w-1, 0),   # (x, y) CHANGE ME!
+                 (w-1, h-1), # (x, y) CHANGE ME!
+                 (0, h-1)]   # (x, y) CHANGE ME!
+
+while(True):
+    clock.tick()
+
+    img = sensor.snapshot().rotation_corr(corners = TARGET_POINTS)
+
+    print(clock.fps())
diff --git a/scripts/examples/04-Image-Filters/rotation_correction.py b/scripts/examples/04-Image-Filters/rotation_correction.py
index 82498f525..b95e41d78 100644
--- a/scripts/examples/04-Image-Filters/rotation_correction.py
+++ b/scripts/examples/04-Image-Filters/rotation_correction.py
@@ -1,9 +1,7 @@
 # Rotation Correction
 #
-# This example shows off how to use the rotation_corr() to undo perspective rotations
-# in 3 dimensions along with zooming in and out on the image. While this demo rotates
-# the image around for fun you can use this feature to fix perspective issues related
-# to how your OpenMV Cam is mounted.
+# This example shows off how to use rotation_corr() to play with the scene
+# window your OpenMV Cam sees.
 
 import sensor, image, time
 
@@ -14,7 +12,14 @@ Z_ROTATION_DEGREE_RATE = 0
 X_OFFSET = 0
 Y_OFFSET = 0
 
-ZOOM_AMOUNT = 1 # Lower zooms out - Higher zooms in
+ZOOM_AMOUNT = 1 # Lower zooms out - Higher zooms in.
+FOV_WINDOW = 60 # Between 0 and 180. Represents the field-of-view of the scene
+                # window when rotating the image in 3D space. Values closer
+                # to zero move the window away from the image being rotated,
+                # so lines stay straighter. Larger values move the window
+                # closer to the image, which increases perspective distortion
+                # and can even make the image in 3D intersect the scene
+                # window.
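The TARGET_POINTS in the two new examples are hand-entered, but the corners keyword can also be fed from a detector. A hedged sketch, assuming OpenMV's find_rects() and the rect.corners() method behave as documented; the corner ordering returned by the detector may need to be rotated so that it lines up with (0, 0), (w-1, 0), (w-1, h-1), (0, h-1):

    import sensor, time

    sensor.reset()
    sensor.set_pixformat(sensor.RGB565)
    sensor.set_framesize(sensor.QVGA)
    sensor.skip_frames(time = 2000)
    clock = time.clock()

    while(True):
        clock.tick()
        img = sensor.snapshot()
        rects = img.find_rects(threshold = 10000)
        if rects:
            # Flatten the first detected rectangle so it fills the frame.
            img.rotation_corr(corners = rects[0].corners())
        print(clock.fps())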
sensor.reset() sensor.set_pixformat(sensor.RGB565) @@ -34,7 +39,8 @@ while(True): z_rotation = z_rotation_counter, \ x_translation = X_OFFSET, \ y_translation = Y_OFFSET, \ - zoom = ZOOM_AMOUNT) + zoom = ZOOM_AMOUNT, \ + fov = FOV_WINDOW) x_rotation_counter += X_ROTATION_DEGREE_RATE y_rotation_counter += Y_ROTATION_DEGREE_RATE diff --git a/src/omv/img/apriltag.c b/src/omv/img/apriltag.c index 8baaf7838..f231168dc 100644 --- a/src/omv/img/apriltag.c +++ b/src/omv/img/apriltag.c @@ -12327,21 +12327,20 @@ void imlib_find_rects(list_t *out, image_t *ptr, rectangle_t *roi, uint32_t thre // http://jepsonsblog.blogspot.com/2012/11/rotation-in-3d-using-opencvs.html void imlib_rotation_corr(image_t *img, float x_rotation, float y_rotation, float z_rotation, float x_translation, float y_translation, - float zoom) + float zoom, float fov, float *corners) { - umm_init_x(4000); // 200 20 byte heap blocks... + umm_init_x(8000); // 400 20 byte heap blocks... - float fov = (M_PI_2 * 2) / 3; // 60 deg FOV - float fov_2 = fov / 2.0; - float d = fast_sqrtf((img->w * img->w) + (img->h * img->h)); - float h = d / (2.0 * tanf(fov_2)); - float h_z = h * zoom; + int w = img->w; + int h = img->h; + float z = (fast_sqrtf((w * w) + (h * h)) / 2) / tanf(fov / 2); + float z_z = z * zoom; matd_t *A1 = matd_create(4, 3); - MATD_EL(A1, 0, 0) = 1; MATD_EL(A1, 0, 1) = 0; MATD_EL(A1, 0, 2) = -img->w / 2.0; - MATD_EL(A1, 1, 0) = 0; MATD_EL(A1, 1, 1) = 1; MATD_EL(A1, 1, 2) = -img->h / 2.0; + MATD_EL(A1, 0, 0) = 1; MATD_EL(A1, 0, 1) = 0; MATD_EL(A1, 0, 2) = -w / 2; + MATD_EL(A1, 1, 0) = 0; MATD_EL(A1, 1, 1) = 1; MATD_EL(A1, 1, 2) = -h / 2; MATD_EL(A1, 2, 0) = 0; MATD_EL(A1, 2, 1) = 0; MATD_EL(A1, 2, 2) = 0; - MATD_EL(A1, 3, 0) = 0; MATD_EL(A1, 3, 1) = 0; MATD_EL(A1, 3, 2) = 1; // needed for h translation + MATD_EL(A1, 3, 0) = 0; MATD_EL(A1, 3, 1) = 0; MATD_EL(A1, 3, 2) = 1; // needed for z translation matd_t *RX = matd_create(4, 4); MATD_EL(RX, 0, 0) = 1; MATD_EL(RX, 0, 1) = 0; MATD_EL(RX, 0, 2) = 0; MATD_EL(RX, 0, 3) = 0; @@ -12366,98 +12365,144 @@ void imlib_rotation_corr(image_t *img, float x_rotation, float y_rotation, float matd_t *T = matd_create(4, 4); MATD_EL(T, 0, 0) = 1; MATD_EL(T, 0, 1) = 0; MATD_EL(T, 0, 2) = 0; MATD_EL(T, 0, 3) = x_translation; MATD_EL(T, 1, 0) = 0; MATD_EL(T, 1, 1) = 1; MATD_EL(T, 1, 2) = 0; MATD_EL(T, 1, 3) = y_translation; - MATD_EL(T, 2, 0) = 0; MATD_EL(T, 2, 1) = 0; MATD_EL(T, 2, 2) = 1; MATD_EL(T, 2, 3) = h; + MATD_EL(T, 2, 0) = 0; MATD_EL(T, 2, 1) = 0; MATD_EL(T, 2, 2) = 1; MATD_EL(T, 2, 3) = z; MATD_EL(T, 3, 0) = 0; MATD_EL(T, 3, 1) = 0; MATD_EL(T, 3, 2) = 0; MATD_EL(T, 3, 3) = 1; matd_t *A2 = matd_create(3, 4); - MATD_EL(A2, 0, 0) = h_z; MATD_EL(A2, 0, 1) = 0; MATD_EL(A2, 0, 2) = img->w / 2.0; MATD_EL(A2, 0, 3) = 0; - MATD_EL(A2, 1, 0) = 0; MATD_EL(A2, 1, 1) = h_z; MATD_EL(A2, 1, 2) = img->h / 2.0; MATD_EL(A2, 1, 3) = 0; - MATD_EL(A2, 2, 0) = 0; MATD_EL(A2, 2, 1) = 0; MATD_EL(A2, 2, 2) = 1; MATD_EL(A2, 2, 3) = 0; + MATD_EL(A2, 0, 0) = z_z; MATD_EL(A2, 0, 1) = 0; MATD_EL(A2, 0, 2) = w / 2; MATD_EL(A2, 0, 3) = 0; + MATD_EL(A2, 1, 0) = 0; MATD_EL(A2, 1, 1) = z_z; MATD_EL(A2, 1, 2) = h / 2; MATD_EL(A2, 1, 3) = 0; + MATD_EL(A2, 2, 0) = 0; MATD_EL(A2, 2, 1) = 0; MATD_EL(A2, 2, 2) = 1; MATD_EL(A2, 2, 3) = 0; matd_t *T1 = matd_op("M*M", R, A1); matd_t *T2 = matd_op("M*M", T, T1); matd_t *T3 = matd_op("M*M", A2, T2); matd_t *T4 = matd_inverse(T3); - switch(img->bpp) { - case IMAGE_BPP_BINARY: { - // Create a temp copy of the image to pull pixels from. 
- uint32_t *tmp = fb_alloc(((img->w + UINT32_T_MASK) >> UINT32_T_SHIFT) * img->h, FB_ALLOC_NO_HINT); - memcpy(tmp, img->data, ((img->w + UINT32_T_MASK) >> UINT32_T_SHIFT) * img->h); - memset(img->data, 0, ((img->w + UINT32_T_MASK) >> UINT32_T_SHIFT) * img->h); + if (corners) { + float corr[4]; + zarray_t *correspondences = zarray_create(sizeof(float[4])); - if (T4) for (int y = 0, yy = img->h; y < yy; y++) { - uint32_t *row_ptr = IMAGE_COMPUTE_BINARY_PIXEL_ROW_PTR(img, y); - for (int x = 0, xx = img->w; x < xx; x++) { - float sourceX, sourceY; homography_project(T4, x, y, &sourceX, &sourceY); - int sourceX2 = round(sourceX); - int sourceY2 = round(sourceY); + corr[0] = 0; + corr[1] = 0; + corr[2] = corners[0]; + corr[3] = corners[1]; + zarray_add(correspondences, &corr); - if ((0 <= sourceX2) && (sourceX2 < img->w) && (0 <= sourceY2) && (sourceY2 < img->h)) { - uint32_t *ptr = tmp + (((img->w + UINT32_T_MASK) >> UINT32_T_SHIFT) * sourceY2); - int pixel = IMAGE_GET_BINARY_PIXEL_FAST(ptr, sourceX2); - IMAGE_PUT_BINARY_PIXEL_FAST(row_ptr, x, pixel); - } - } - } + corr[0] = w - 1; + corr[1] = 0; + corr[2] = corners[2]; + corr[3] = corners[3]; + zarray_add(correspondences, &corr); - fb_free(); - break; + corr[0] = w - 1; + corr[1] = h- 1; + corr[2] = corners[4]; + corr[3] = corners[5]; + zarray_add(correspondences, &corr); + + corr[0] = 0; + corr[1] = h - 1; + corr[2] = corners[6]; + corr[3] = corners[7]; + zarray_add(correspondences, &corr); + + matd_t *H = homography_compute(correspondences, HOMOGRAPHY_COMPUTE_FLAG_SVD); + + if (T4 && H) { + matd_t *T5 = matd_op("M*M", H, T4); + matd_destroy(H); + matd_destroy(T4); + T4 = T5; } - case IMAGE_BPP_GRAYSCALE: { - // Create a temp copy of the image to pull pixels from. - uint8_t *tmp = fb_alloc(img->w * img->h * sizeof(uint8_t), FB_ALLOC_NO_HINT); - memcpy(tmp, img->data, img->w * img->h * sizeof(uint8_t)); - memset(img->data, 0, img->w * img->h * sizeof(uint8_t)); - if (T4) for (int y = 0, yy = img->h; y < yy; y++) { - uint8_t *row_ptr = IMAGE_COMPUTE_GRAYSCALE_PIXEL_ROW_PTR(img, y); - for (int x = 0, xx = img->w; x < xx; x++) { - float sourceX, sourceY; homography_project(T4, x, y, &sourceX, &sourceY); - int sourceX2 = round(sourceX); - int sourceY2 = round(sourceY); - - if ((0 <= sourceX2) && (sourceX2 < img->w) && (0 <= sourceY2) && (sourceY2 < img->h)) { - uint8_t *ptr = tmp + (img->w * sourceY2); - int pixel = IMAGE_GET_GRAYSCALE_PIXEL_FAST(ptr, sourceX2); - IMAGE_PUT_GRAYSCALE_PIXEL_FAST(row_ptr, x, pixel); - } - } - } - - fb_free(); - break; - } - case IMAGE_BPP_RGB565: { - // Create a temp copy of the image to pull pixels from. 
- uint16_t *tmp = fb_alloc(img->w * img->h * sizeof(uint16_t), FB_ALLOC_NO_HINT); - memcpy(tmp, img->data, img->w * img->h * sizeof(uint16_t)); - memset(img->data, 0, img->w * img->h * sizeof(uint16_t)); - - if (T4) for (int y = 0, yy = img->h; y < yy; y++) { - uint16_t *row_ptr = IMAGE_COMPUTE_RGB565_PIXEL_ROW_PTR(img, y); - for (int x = 0, xx = img->w; x < xx; x++) { - float sourceX, sourceY; homography_project(T4, x, y, &sourceX, &sourceY); - int sourceX2 = round(sourceX); - int sourceY2 = round(sourceY); - - if ((0 <= sourceX2) && (sourceX2 < img->w) && (0 <= sourceY2) && (sourceY2 < img->h)) { - uint16_t *ptr = tmp + (img->w * sourceY2); - int pixel = IMAGE_GET_RGB565_PIXEL_FAST(ptr, sourceX2); - IMAGE_PUT_RGB565_PIXEL_FAST(row_ptr, x, pixel); - } - } - } - - fb_free(); - break; - } - default: { - break; - } + zarray_destroy(correspondences); } - if (T4) matd_destroy(T4); + // Create a tmp copy of the image to pull pixels from. + size_t size = image_size(img); + void *data = fb_alloc(size, FB_ALLOC_NO_HINT); + memcpy(data, img->data, size); + memset(img->data, 0, size); + + if (T4) { + float T4_00 = MATD_EL(T4, 0, 0), T4_01 = MATD_EL(T4, 0, 1), T4_02 = MATD_EL(T4, 0, 2); + float T4_10 = MATD_EL(T4, 1, 0), T4_11 = MATD_EL(T4, 1, 1), T4_12 = MATD_EL(T4, 1, 2); + float T4_20 = MATD_EL(T4, 2, 0), T4_21 = MATD_EL(T4, 2, 1), T4_22 = MATD_EL(T4, 2, 2); + + switch(img->bpp) { + case IMAGE_BPP_BINARY: { + uint32_t *tmp = (uint32_t *) data; + + for (int y = 0, yy = h; y < yy; y++) { + uint32_t *row_ptr = IMAGE_COMPUTE_BINARY_PIXEL_ROW_PTR(img, y); + for (int x = 0, xx = w; x < xx; x++) { + float xxx = T4_00*x + T4_01*y + T4_02; + float yyy = T4_10*x + T4_11*y + T4_12; + float zzz = T4_20*x + T4_21*y + T4_22; + int sourceX = fast_roundf(xxx / zzz); + int sourceY = fast_roundf(yyy / zzz); + + if ((0 <= sourceX) && (sourceX < w) && (0 <= sourceY) && (sourceY < h)) { + uint32_t *ptr = tmp + (((w + UINT32_T_MASK) >> UINT32_T_SHIFT) * sourceY); + int pixel = IMAGE_GET_BINARY_PIXEL_FAST(ptr, sourceX); + IMAGE_PUT_BINARY_PIXEL_FAST(row_ptr, x, pixel); + } + } + } + break; + } + case IMAGE_BPP_GRAYSCALE: { + uint8_t *tmp = (uint8_t *) data; + + for (int y = 0, yy = h; y < yy; y++) { + uint8_t *row_ptr = IMAGE_COMPUTE_GRAYSCALE_PIXEL_ROW_PTR(img, y); + for (int x = 0, xx = w; x < xx; x++) { + float xxx = T4_00*x + T4_01*y + T4_02; + float yyy = T4_10*x + T4_11*y + T4_12; + float zzz = T4_20*x + T4_21*y + T4_22; + int sourceX = fast_roundf(xxx / zzz); + int sourceY = fast_roundf(yyy / zzz); + + if ((0 <= sourceX) && (sourceX < w) && (0 <= sourceY) && (sourceY < h)) { + uint8_t *ptr = tmp + (w * sourceY); + int pixel = IMAGE_GET_GRAYSCALE_PIXEL_FAST(ptr, sourceX); + IMAGE_PUT_GRAYSCALE_PIXEL_FAST(row_ptr, x, pixel); + } + } + } + break; + } + case IMAGE_BPP_RGB565: { + uint16_t *tmp = (uint16_t *) data; + + for (int y = 0, yy = h; y < yy; y++) { + uint16_t *row_ptr = IMAGE_COMPUTE_RGB565_PIXEL_ROW_PTR(img, y); + for (int x = 0, xx = w; x < xx; x++) { + float xxx = T4_00*x + T4_01*y + T4_02; + float yyy = T4_10*x + T4_11*y + T4_12; + float zzz = T4_20*x + T4_21*y + T4_22; + int sourceX = fast_roundf(xxx / zzz); + int sourceY = fast_roundf(yyy / zzz); + + if ((0 <= sourceX) && (sourceX < w) && (0 <= sourceY) && (sourceY < h)) { + uint16_t *ptr = tmp + (w * sourceY); + int pixel = IMAGE_GET_RGB565_PIXEL_FAST(ptr, sourceX); + IMAGE_PUT_RGB565_PIXEL_FAST(row_ptr, x, pixel); + } + } + } + break; + } + default: { + break; + } + } + + matd_destroy(T4); + } + + fb_free(); + matd_destroy(T3); matd_destroy(T2); 
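The three per-format loops above all follow the same inverse-mapping pattern: every destination pixel (x, y) is multiplied by the combined 3x3 matrix, divided by the homogeneous term, rounded, and, if the result lands inside the temporary source copy, sampled; otherwise the destination pixel stays zero. A rough Python rendering of that pattern, assuming a grayscale image stored as nested lists and a 3x3 matrix T4 also given as nested lists:

    def warp(src, w, h, T4):
        # Inverse mapping: walk the destination, sample the source.
        out = [[0 for _ in range(w)] for _ in range(h)]
        for y in range(h):
            for x in range(w):
                xx = T4[0][0] * x + T4[0][1] * y + T4[0][2]
                yy = T4[1][0] * x + T4[1][1] * y + T4[1][2]
                zz = T4[2][0] * x + T4[2][1] * y + T4[2][2]
                sx = int(round(xx / zz))
                sy = int(round(yy / zz))
                if 0 <= sx < w and 0 <= sy < h:
                    out[y][x] = src[sy][sx]
        return out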
matd_destroy(T1); diff --git a/src/omv/img/imlib.h b/src/omv/img/imlib.h index 17b5a56af..624a2f118 100644 --- a/src/omv/img/imlib.h +++ b/src/omv/img/imlib.h @@ -1332,7 +1332,7 @@ void imlib_illuminvar(image_t *img); void imlib_lens_corr(image_t *img, float strength, float zoom); void imlib_rotation_corr(image_t *img, float x_rotation, float y_rotation, float z_rotation, float x_translation, float y_translation, - float zoom); + float zoom, float fov, float *corners); // Statistics void imlib_get_similarity(image_t *img, const char *path, image_t *other, int scalar, float *avg, float *std, float *min, float *max); void imlib_get_histogram(histogram_t *out, image_t *ptr, rectangle_t *roi, list_t *thresholds, bool invert); diff --git a/src/omv/img/phasecorrelation.c b/src/omv/img/phasecorrelation.c index 192834ce5..f0865e15c 100644 --- a/src/omv/img/phasecorrelation.c +++ b/src/omv/img/phasecorrelation.c @@ -332,7 +332,7 @@ void imlib_phasecorrelate(image_t *img0, image_t *img1, rectangle_t *roi0, recta } } - imlib_rotation_corr(&img0_fixed, 0, 0, *rotation, 0, 0, *scale); + imlib_rotation_corr(&img0_fixed, 0, 0, *rotation, 0, 0, *scale, 60, NULL); } else { memcpy(&img0_fixed, img0, sizeof(image_t)); memcpy(&roi0_fixed, roi0, sizeof(rectangle_t)); diff --git a/src/omv/py/py_helper.c b/src/omv/py/py_helper.c index 44fe72fc9..093f00b09 100644 --- a/src/omv/py/py_helper.c +++ b/src/omv/py/py_helper.c @@ -170,6 +170,38 @@ void py_helper_keyword_float_array(uint n_args, const mp_obj_t *args, uint arg_i } } +float *py_helper_keyword_corner_array(uint n_args, const mp_obj_t *args, uint arg_index, + mp_map_t *kw_args, mp_obj_t kw) +{ + mp_map_elem_t *kw_arg = mp_map_lookup(kw_args, kw, MP_MAP_LOOKUP); + + if (kw_arg) { + mp_obj_t *arg_array; + mp_obj_get_array_fixed_n(kw_arg->value, 4, &arg_array); + float *corners = xalloc(sizeof(float) * 8); + for (int i = 0; i < 4; i++) { + mp_obj_t *arg_point; + mp_obj_get_array_fixed_n(arg_array[i], 2, &arg_point); + corners[(i*2)+0] = mp_obj_get_float(arg_point[0]); + corners[(i*2)+1] = mp_obj_get_float(arg_point[1]); + } + return corners; + } else if (n_args > arg_index) { + mp_obj_t *arg_array; + mp_obj_get_array_fixed_n(args[arg_index], 4, &arg_array); + float *corners = xalloc(sizeof(float) * 8); + for (int i = 0; i < 4; i++) { + mp_obj_t *arg_point; + mp_obj_get_array_fixed_n(arg_array[i], 2, &arg_point); + corners[(i*2)+0] = mp_obj_get_float(arg_point[0]); + corners[(i*2)+1] = mp_obj_get_float(arg_point[1]); + } + return corners; + } + + return NULL; +} + uint py_helper_consume_array(uint n_args, const mp_obj_t *args, uint arg_index, size_t len, const mp_obj_t **items) { if (MP_OBJ_IS_TYPE(args[arg_index], &mp_type_tuple) || MP_OBJ_IS_TYPE(args[arg_index], &mp_type_list)) { diff --git a/src/omv/py/py_helper.h b/src/omv/py/py_helper.h index b3bd7dad7..51194a29a 100644 --- a/src/omv/py/py_helper.h +++ b/src/omv/py/py_helper.h @@ -33,6 +33,8 @@ void py_helper_keyword_int_array(uint n_args, const mp_obj_t *args, uint arg_ind mp_map_t *kw_args, mp_obj_t kw, int *x, int size); void py_helper_keyword_float_array(uint n_args, const mp_obj_t *args, uint arg_index, mp_map_t *kw_args, mp_obj_t kw, float *x, int size); +float *py_helper_keyword_corner_array(uint n_args, const mp_obj_t *args, uint arg_index, + mp_map_t *kw_args, mp_obj_t kw); uint py_helper_consume_array(uint n_args, const mp_obj_t *args, uint arg_index, size_t len, const mp_obj_t **items); int py_helper_keyword_color(image_t *img, uint n_args, const mp_obj_t *args, uint arg_index, mp_map_t 
*kw_args, int default_val); diff --git a/src/omv/py/py_image.c b/src/omv/py/py_image.c index fe19ed920..b0a1aebe8 100644 --- a/src/omv/py/py_image.c +++ b/src/omv/py/py_image.c @@ -3183,11 +3183,11 @@ STATIC mp_obj_t py_image_lens_corr(uint n_args, const mp_obj_t *args, mp_map_t * PY_ASSERT_FALSE_MSG(arg_img->w % 2, "Width must be even!"); PY_ASSERT_FALSE_MSG(arg_img->h % 2, "Height must be even!"); float arg_strength = - py_helper_keyword_float(n_args, args, 1, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_strength), 1.8); - PY_ASSERT_TRUE_MSG(arg_strength > 0.0, "Strength must be > 0!"); + py_helper_keyword_float(n_args, args, 1, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_strength), 1.8f); + PY_ASSERT_TRUE_MSG(arg_strength > 0.0f, "Strength must be > 0!"); float arg_zoom = - py_helper_keyword_float(n_args, args, 2, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_zoom), 1.0); - PY_ASSERT_TRUE_MSG(arg_zoom > 0.0, "Zoom must be > 0!"); + py_helper_keyword_float(n_args, args, 2, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_zoom), 1.0f); + PY_ASSERT_TRUE_MSG(arg_zoom > 0.0f, "Zoom must be > 0!"); fb_alloc_mark(); imlib_lens_corr(arg_img, arg_strength, arg_zoom); @@ -3203,24 +3203,28 @@ STATIC mp_obj_t py_image_rotation_corr(uint n_args, const mp_obj_t *args, mp_map image_t *arg_img = py_helper_arg_to_image_mutable(args[0]); float arg_x_rotation = - IM_DEG2RAD(py_helper_keyword_float(n_args, args, 1, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_x_rotation), 0.0)); + IM_DEG2RAD(py_helper_keyword_float(n_args, args, 1, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_x_rotation), 0.0f)); float arg_y_rotation = - IM_DEG2RAD(py_helper_keyword_float(n_args, args, 2, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_y_rotation), 0.0)); + IM_DEG2RAD(py_helper_keyword_float(n_args, args, 2, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_y_rotation), 0.0f)); float arg_z_rotation = - IM_DEG2RAD(py_helper_keyword_float(n_args, args, 3, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_z_rotation), 0.0)); + IM_DEG2RAD(py_helper_keyword_float(n_args, args, 3, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_z_rotation), 0.0f)); float arg_x_translation = - py_helper_keyword_float(n_args, args, 4, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_x_translation), 0.0); + py_helper_keyword_float(n_args, args, 4, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_x_translation), 0.0f); float arg_y_translation = - py_helper_keyword_float(n_args, args, 5, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_y_translation), 0.0); + py_helper_keyword_float(n_args, args, 5, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_y_translation), 0.0f); float arg_zoom = - py_helper_keyword_float(n_args, args, 6, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_zoom), 1.0); - PY_ASSERT_TRUE_MSG(arg_zoom > 0.0, "Zoom must be > 0!"); + py_helper_keyword_float(n_args, args, 6, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_zoom), 1.0f); + PY_ASSERT_TRUE_MSG(arg_zoom > 0.0f, "Zoom must be > 0!"); + float arg_fov = + IM_DEG2RAD(py_helper_keyword_float(n_args, args, 7, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_fov), 60.0f)); + PY_ASSERT_TRUE_MSG((0.0f < arg_fov) && (arg_fov < 180.0f), "FOV must be > 0 and < 180!"); + float *arg_corners = py_helper_keyword_corner_array(n_args, args, 8, kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_corners)); fb_alloc_mark(); imlib_rotation_corr(arg_img, arg_x_rotation, arg_y_rotation, arg_z_rotation, arg_x_translation, arg_y_translation, - arg_zoom); + arg_zoom, arg_fov, arg_corners); fb_alloc_free_till_mark(); return args[0]; } diff --git a/src/omv/py/qstrdefsomv.h b/src/omv/py/qstrdefsomv.h index a3f60287c..c55d11688 100644 --- a/src/omv/py/qstrdefsomv.h +++ b/src/omv/py/qstrdefsomv.h @@ -814,6 +814,8 @@ Q(z_rotation) Q(x_translation) Q(y_translation) // 
duplicate Q(zoom) +Q(fov) +Q(corners) // Structural Similarity Q(get_similarity) @@ -940,7 +942,7 @@ Q(x_hist_bins_max) Q(y_hist_bins_max) // Blob Object Q(blob) -Q(corners) +// duplicate Q(corners) Q(min_corners) Q(rect) Q(x)
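For readers unfamiliar with the "perspective correction through 4 point correspondence" mentioned in the commit message: the corners keyword makes the firmware compute a homography that maps the frame corners (0, 0), (w-1, 0), (w-1, h-1), (0, h-1) onto the four picked points (the corr[] / homography_compute() code added to apriltag.c) and compose it with the rotation warp before the per-pixel pass. A minimal, dependency-free Python sketch of solving such a homography by the direct linear transform; homography_from_corners is an illustrative name, not firmware API:

    def homography_from_corners(src, dst):
        # Build the 8x8 direct-linear-transform system A*h = b with H[2][2] = 1:
        # each (x, y) -> (u, v) pair contributes one equation for u and one for v.
        A, b = [], []
        for (x, y), (u, v) in zip(src, dst):
            A.append([x, y, 1, 0, 0, 0, -u * x, -u * y]); b.append(u)
            A.append([0, 0, 0, x, y, 1, -v * x, -v * y]); b.append(v)

        n = 8
        for col in range(n):  # Gaussian elimination with partial pivoting.
            pivot = max(range(col, n), key=lambda r: abs(A[r][col]))
            A[col], A[pivot] = A[pivot], A[col]
            b[col], b[pivot] = b[pivot], b[col]
            for row in range(col + 1, n):
                f = A[row][col] / A[col][col]
                for k in range(col, n):
                    A[row][k] -= f * A[col][k]
                b[row] -= f * b[col]

        h = [0.0] * n  # Back substitution.
        for row in range(n - 1, -1, -1):
            s = b[row] - sum(A[row][k] * h[k] for k in range(row + 1, n))
            h[row] = s / A[row][row]

        return [[h[0], h[1], h[2]],
                [h[3], h[4], h[5]],
                [h[6], h[7], 1.0]]

    # Map the nominal QVGA frame corners onto a hand-picked quadrilateral.
    H = homography_from_corners([(0, 0), (319, 0), (319, 239), (0, 239)],
                                [(23, 17), (301, 9), (310, 228), (14, 221)])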