Re-implement line filters with Python callbacks.

parent 5265211cc4
commit 2e2c563227
@@ -164,7 +164,6 @@ FIRM_OBJ += $(addprefix $(BUILD)/$(OMV_DIR)/img/,\
    jpeg.o \
    lbp.o \
    eye.o \
    im_filter.o \
)

FIRM_OBJ += $(addprefix $(BUILD)/$(OMV_DIR)/py/, \
@@ -49,7 +49,6 @@ SRCS += $(addprefix img/, \
    jpeg.c \
    lbp.c \
    eye.c \
    im_filter.c \
)

SRCS += $(addprefix py/, \
@@ -1,72 +0,0 @@
/*
 * This file is part of the OpenMV project.
 * Copyright (c) 2013/2014 Ibrahim Abdelkader <i.abdalkader@gmail.com>
 * This work is licensed under the MIT license, see the file LICENSE for details.
 *
 * Filter Functions.
 *
 * Filter functions bypass the default line processing in sensor.c, and pre-process lines.
 * Processing is done on the fly, i.e. filters are called from after each line is received.
 *
 * Note: bpp is the target bpp, not the line bpp (the line is always 2 bytes per pixel) if the target bpp is 1
 * it means the image currently being read is going to be Grayscale, and the function needs to output w*1bpp.
 */
#include <stdint.h>
#include "imlib.h"

// RGB565 to YUV table
extern const int8_t yuv_table[196608];

void im_filter_bw(uint8_t *src, uint8_t *dst, int size, int bpp, void *args)
{
    int lower = ((int*)args)[0];
    int upper = ((int*)args)[1];

    if (bpp == 1) {
        // Extract Y channel from YUV and process
        for (int i=0; i<size; i++) {
            dst[i] = (src[i<<1] >= lower && src[i<<1] <= upper) ? 0xFF : 0;
        }
    } else {
        // Lookup Y channel from RGB2YUV
        uint16_t *srcrgb = (uint16_t*) src;
        uint16_t *dstrgb = (uint16_t*) dst;
        for (int i=0; i<size; i++) {
            int y = yuv_table[srcrgb[i] * 3 + 0]+128;
            dstrgb[i] = (y >= lower && y <= upper) ? 0xFFFF : 0;
        }
    }
}


// Thresholds taken from "Skin Segmentation Using YUV and RGB Color Spaces" Zaher Hamid Al-Tairi
void im_filter_skin(uint8_t *src, uint8_t *dst, int size, int bpp, void *args)
{
    if (bpp == 1) {
        // Kinda works
        for (int i=0; i<size; i+=2, src+=4) {
            //uint8_t y0 = src[0];
            uint8_t u = src[1];
            //uint8_t y1 = src[2];
            uint8_t v = src[3];
            // YCbCr
            dst[i+0] = (u>80 && u<130 && v>136 && v<200 && v>u) ? 255 : 0;
            dst[i+1] = (u>80 && u<130 && v>136 && v<200 && v>u) ? 255 : 0;

        }
    } else {
        // This doesn't work
        uint16_t *srcrgb = (uint16_t*) src;
        uint16_t *dstrgb = (uint16_t*) dst;
        for (int i=0; i<size; i++) {
            int r = IM_R528(srcrgb[i]);
            int g = IM_G628(srcrgb[i]);
            int b = IM_B528(srcrgb[i]);
            //int y = yuv_table[srcrgb[i] * 3 + 0] + 128;
            int u = (int) yuv_table[srcrgb[i] * 3 + 1] + 128;
            int v = (int) yuv_table[srcrgb[i] * 3 + 2] + 128;
            dstrgb[i] = (u>80 && u<130 && v>136 && v<200 &&
                         r>80 && g>30 && b>15 && (((r-g)*(r-g)) > 225)) ? srcrgb[i] : 0;
        }
    }
}
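For reference, the binary-threshold behaviour of the removed im_filter_bw maps onto the new Python callback model roughly as sketched below. The function name and the lower/upper defaults (taken from the removed py_sensor_set_image_filter defaults of 200 and 255) are assumptions, and only the grayscale path is shown, mirroring the example added at the end of this commit.

# Sketch: Python line filter roughly equivalent to the removed im_filter_bw (grayscale path).
# src wraps the raw readout line (YUYV, 2 bytes per pixel); dst wraps the 1 byte-per-pixel
# grayscale framebuffer line. The lower/upper defaults mirror the old C defaults (assumed usage).
def py_filter_bw(src, dst, lower=200, upper=255):
    for i in range(len(dst)):
        y = src[i << 1]                              # Y channel of pixel i in the YUYV line
        dst[i] = 0xFF if lower <= y <= upper else 0x00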
@@ -24,24 +24,60 @@ static mp_obj_t py_sensor_reset() {
    return mp_const_none;
}

static mp_obj_t py_sensor_snapshot() {
/*
 * Filter functions bypass the default line processing in sensor.c, and pre-process lines before anything else.
 * Processing is done on the fly, i.e. line filters are called from sensor_snapshot after each line is readout.
 *
 *
 * Note2: This double indirection is to decouple omv/img code from omv/py code as much as possible.
 */
static void py_line_filter(uint8_t *src, int src_stride, uint8_t *dst, int dst_stride, void *args)
{
    nlr_buf_t nlr;
    if (nlr_push(&nlr) == 0) {
        mp_call_function_2((mp_obj_t) args,                   // Callback function
            mp_obj_new_bytearray_by_ref(src_stride, src),     // Source line buffer
            mp_obj_new_bytearray_by_ref(dst_stride, dst));    // Destination line buffer
        nlr_pop();
    } else {
        // Uncaught exception; disable the callback so it doesn't run again.
        sensor_set_line_filter(NULL, NULL);
        mp_obj_print_exception(&mp_plat_print, (mp_obj_t)nlr.ret_val);
    }
}

static mp_obj_t py_sensor_snapshot(uint n_args, const mp_obj_t *args, mp_map_t *kw_args) {
    // Snapshot image
    mp_obj_t image = py_image(0, 0, 0, 0);

    // Line pre-processing function and args
    mp_obj_t line_filter_args = NULL;
    line_filter_t line_filter_func = NULL;

    // Sanity checks
    PY_ASSERT_FALSE_MSG((sensor.pixformat != PIXFORMAT_JPEG &&
                         sensor.framesize > OMV_MAX_RAW_FRAME),
            "Raw image is only supported for "OMV_MAX_RAW_FRAME_STR" and smaller frames");

    if (sensor_snapshot((struct image*) py_image_cobj(image))==-1) {
    // Lookup filter function
    mp_map_elem_t *kw_arg = mp_map_lookup(kw_args, MP_OBJ_NEW_QSTR(MP_QSTR_line_filter), MP_MAP_LOOKUP);
    if (kw_arg != NULL) {
        line_filter_args = kw_arg->value;
        line_filter_func = py_line_filter;
    }

    if (sensor_snapshot((struct image*) py_image_cobj(image), line_filter_func, line_filter_args)==-1) {
        nlr_jump(mp_obj_new_exception_msg(&mp_type_RuntimeError, "Sensor Timeout!!"));
        return mp_const_false;
    }

    return image;
}

static mp_obj_t py_sensor_skip_frames(uint n_args, const mp_obj_t *args) {
    int frames = (n_args == 1) ? mp_obj_get_int(args[0]) : 10; // OV Recommended.
    for (int i = 0; i < frames; i++) {
        if (sensor_snapshot(NULL) == -1) {
        if (sensor_snapshot(NULL, NULL, NULL) == -1) {
            nlr_jump(mp_obj_new_exception_msg(&mp_type_RuntimeError, "Sensor Timeout!!"));
        }
    }
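In Python terms, the wrapper above hands the callback two bytearray views over the raw line buffers, sized by the strides passed in, and any uncaught exception disables the filter and is printed. A minimal sketch of that contract, with the callback name and the grayscale QQVGA configuration chosen purely as assumed examples:

import sensor

sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)  # grayscale: the output line is 1 byte per pixel
sensor.set_framesize(sensor.QQVGA)      # 160x120, so each line is 160 pixels wide

# The callback sees two bytearray views sized by the strides passed above:
#   len(src) == 160 * 2  (raw YUYV readout line, 2 bytes per pixel)
#   len(dst) == 160 * 1  (grayscale framebuffer line)
def my_line_filter(src, dst):
    for i in range(len(dst)):
        dst[i] = src[i << 1]   # keep the Y channel; raising here disables the filter

img = sensor.snapshot(line_filter=my_line_filter)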
@@ -220,44 +256,6 @@ static mp_obj_t py_sensor_set_special_effect(mp_obj_t sde) {
    return mp_const_true;
}

#define MP_MAP_LOOKUP_STR(s) mp_map_lookup(kw_args, MP_OBJ_NEW_QSTR(qstr_from_str(s)), MP_MAP_LOOKUP)
static mp_obj_t py_sensor_set_image_filter(uint n_args, const mp_obj_t *args, mp_map_t *kw_args) {
    int *im_filter_args=NULL;
    im_filter_t im_filter=NULL;
    im_filter_type_t im_filter_type = mp_obj_get_int(args[0]);

    switch (im_filter_type) {
        case IM_FILTER_BW: {
            int lower = 200, upper = 255;
            // Read keyword arguments
            mp_map_elem_t *kw_lower = MP_MAP_LOOKUP_STR("lower");
            mp_map_elem_t *kw_upper = MP_MAP_LOOKUP_STR("upper");

            if (kw_lower) {
                lower = mp_obj_get_int(kw_lower->value);
            }

            if (kw_upper) {
                lower = mp_obj_get_int(kw_lower->value);
            }

            im_filter = &im_filter_bw;
            im_filter_args = (int*) xalloc(2*sizeof(int));
            im_filter_args[0] = lower;
            im_filter_args[1] = upper;
            break;
        }
        case IM_FILTER_SKIN:
            im_filter = &im_filter_skin;
            break;
    }

    if (sensor_set_image_filter(im_filter, im_filter_args) != 0) {
        return mp_const_false;
    }

    return mp_const_true;
}
static mp_obj_t py_sensor_write_reg(mp_obj_t addr, mp_obj_t val) {
    sensor_write_reg(mp_obj_get_int(addr), mp_obj_get_int(val));
    return mp_const_none;
@@ -273,7 +271,7 @@ static mp_obj_t py_sensor_read_reg(mp_obj_t addr) {
//}

STATIC MP_DEFINE_CONST_FUN_OBJ_0(py_sensor_reset_obj, py_sensor_reset);
STATIC MP_DEFINE_CONST_FUN_OBJ_0(py_sensor_snapshot_obj, py_sensor_snapshot);
STATIC MP_DEFINE_CONST_FUN_OBJ_KW(py_sensor_snapshot_obj, 0, py_sensor_snapshot);
STATIC MP_DEFINE_CONST_FUN_OBJ_VAR_BETWEEN(py_sensor_skip_frames_obj, 0, 1, py_sensor_skip_frames);
STATIC MP_DEFINE_CONST_FUN_OBJ_0(py_sensor_get_fb_obj, py_sensor_get_fb);
STATIC MP_DEFINE_CONST_FUN_OBJ_0(py_sensor_get_id_obj, py_sensor_get_id);
@@ -292,7 +290,6 @@ STATIC MP_DEFINE_CONST_FUN_OBJ_1(py_sensor_set_exposure_ctrl_obj, py_sensor_se
STATIC MP_DEFINE_CONST_FUN_OBJ_1(py_sensor_set_hmirror_obj, py_sensor_set_hmirror);
STATIC MP_DEFINE_CONST_FUN_OBJ_1(py_sensor_set_vflip_obj, py_sensor_set_vflip);
STATIC MP_DEFINE_CONST_FUN_OBJ_1(py_sensor_set_special_effect_obj, py_sensor_set_special_effect);
STATIC MP_DEFINE_CONST_FUN_OBJ_KW(py_sensor_set_image_filter_obj,1, py_sensor_set_image_filter);
STATIC MP_DEFINE_CONST_FUN_OBJ_2(py_sensor_write_reg_obj, py_sensor_write_reg);
STATIC MP_DEFINE_CONST_FUN_OBJ_1(py_sensor_read_reg_obj, py_sensor_read_reg);

@@ -311,10 +308,6 @@ STATIC const mp_map_elem_t globals_dict_table[] = {
    { MP_OBJ_NEW_QSTR(MP_QSTR_NORMAL), MP_OBJ_NEW_SMALL_INT(SDE_NORMAL)}, /* Normal/No SDE */
    { MP_OBJ_NEW_QSTR(MP_QSTR_NEGATIVE), MP_OBJ_NEW_SMALL_INT(SDE_NEGATIVE)}, /* Negative image */

    // Image filters
    { MP_OBJ_NEW_QSTR(MP_QSTR_FILTER_BW), MP_OBJ_NEW_SMALL_INT(IM_FILTER_BW)}, /* Black/White filter */
    { MP_OBJ_NEW_QSTR(MP_QSTR_FILTER_SKIN), MP_OBJ_NEW_SMALL_INT(IM_FILTER_SKIN)}, /* Skin filter */

    // Frame size
    { MP_OBJ_NEW_QSTR(MP_QSTR_QQCIF), MP_OBJ_NEW_SMALL_INT(FRAMESIZE_QQCIF)}, /* 88x72 */
    { MP_OBJ_NEW_QSTR(MP_QSTR_QQVGA), MP_OBJ_NEW_SMALL_INT(FRAMESIZE_QQVGA)}, /* 160x120 */
@@ -349,7 +342,6 @@ STATIC const mp_map_elem_t globals_dict_table[] = {
    { MP_OBJ_NEW_QSTR(MP_QSTR_set_hmirror), (mp_obj_t)&py_sensor_set_hmirror_obj },
    { MP_OBJ_NEW_QSTR(MP_QSTR_set_vflip), (mp_obj_t)&py_sensor_set_vflip_obj },
    { MP_OBJ_NEW_QSTR(MP_QSTR_set_special_effect), (mp_obj_t)&py_sensor_set_special_effect_obj },
    { MP_OBJ_NEW_QSTR(MP_QSTR_set_image_filter), (mp_obj_t)&py_sensor_set_image_filter_obj },
    { MP_OBJ_NEW_QSTR(MP_QSTR___write_reg), (mp_obj_t)&py_sensor_write_reg_obj },
    { MP_OBJ_NEW_QSTR(MP_QSTR___read_reg), (mp_obj_t)&py_sensor_read_reg_obj },
};

@@ -152,9 +152,7 @@ Q(UXGA)
Q(OV9650)
Q(OV2640)
Q(OV7725)
// Filters
Q(FILTER_BW)
Q(FILTER_SKIN)
Q(line_filter)

//SDE
Q(NORMAL)
@@ -180,7 +178,6 @@ Q(set_exposure_ctrl)
Q(set_hmirror)
Q(set_vflip)
Q(set_special_effect)
Q(set_image_filter)
Q(__write_reg)
Q(__read_reg)

@@ -278,8 +278,9 @@ int sensor_reset()
    sensor.framerate=0xFF;
    sensor.gainceiling=0xFF;


    // Reset image filter
    sensor_set_image_filter(NULL, NULL);
    sensor_set_line_filter(NULL, NULL);

    // Call sensor-specific reset function
    sensor.reset(&sensor);
@@ -524,10 +525,11 @@ int sensor_set_special_effect(sde_t sde)
    return 0;
}

int sensor_set_image_filter(im_filter_t filter, void *args)
int sensor_set_line_filter(line_filter_t line_filter_func, void *line_filter_args)
{
    sensor.im_filter = filter;
    sensor.im_filter_args = args;
    // Set line pre-processing function and args
    sensor.line_filter_func = line_filter_func;
    sensor.line_filter_args = line_filter_args;
    return 0;
}

@@ -544,11 +546,13 @@ void DCMI_DMAConvCpltUser(uint32_t addr)
        dst += FB_JPEG_OFFS_SIZE;
    }

    if (sensor.im_filter != NULL) {
        dst += line++ * fb->w * ((sensor.pixformat == PIXFORMAT_GRAYSCALE) ? 1:2);
    if (sensor.line_filter_func && sensor.line_filter_args) {
        int bpp = ((sensor.pixformat == PIXFORMAT_GRAYSCALE) ? 1:2);
        dst += line++ * fb->w * bpp;
        // If there's an image filter installed call it.
        sensor.im_filter(src, dst, fb->w,
                (sensor.pixformat == PIXFORMAT_GRAYSCALE) ? 1:2, sensor.im_filter_args);
        // Note: BPP is the target BPP, not the line bpp (the line is always 2 bytes per pixel) if the target BPP is 1
        // it means the image currently being read is going to be Grayscale, and the function needs to output w * 1BPP.
        sensor.line_filter_func(src, fb->w * 2 , dst, fb->w * bpp, sensor.line_filter_args);
    } else {
        // Else just process the line normally.
        if (sensor.pixformat == PIXFORMAT_GRAYSCALE) {
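As a quick sanity check of the stride arithmetic above, a worked example in Python, using QQVGA (160 pixels per line) purely as an assumed frame size:

# Worked example of the strides passed to line_filter_func (QQVGA assumed).
w = 160                   # fb->w for QQVGA
src_stride = w * 2        # the readout line is always 2 bytes per pixel (YUYV or RGB565)
dst_gray = w * 1          # target bpp 1 -> 160-byte grayscale output line
dst_rgb = w * 2           # target bpp 2 -> 320-byte RGB565 output line
print(src_stride, dst_gray, dst_rgb)   # 320 160 320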
@@ -569,12 +573,15 @@ void DCMI_DMAConvCpltUser(uint32_t addr)
// The JPEG offset allows JPEG compression of the framebuffer without overwriting the pixels.
// The offset size may need to be adjusted depending on the quality, otherwise JPEG data may
// overwrite image pixels before they are compressed.
int sensor_snapshot(image_t *image)
int sensor_snapshot(image_t *image, line_filter_t line_filter_func, void *line_filter_args)
{
    volatile uint32_t addr;
    volatile uint16_t length;
    uint32_t snapshot_start;

    // Set line filter
    sensor_set_line_filter(line_filter_func, line_filter_args);

    // Compress the framebuffer for the IDE only for non-JPEG images and
    // only if the IDE has requested a framebuffer and it's not the first frame.
    // Note: This doesn't run unless the camera is connected to PC.

@@ -78,14 +78,7 @@ typedef enum {
    ACTIVE_HIGH
} reset_polarity_t;

// Sensor filter functions
// These functions process single image lines.
typedef enum {
    IM_FILTER_BW,
    IM_FILTER_SKIN
} im_filter_type_t;

typedef void (*im_filter_t) (uint8_t *src, uint8_t *dst, int size, int bpp, void *args);
typedef void (*line_filter_t) (uint8_t *src, int src_stride, uint8_t *dst, int dst_stride, void *args);

#define SENSOR_HW_FLAGS_VSYNC (0) // vertical sync polarity.
#define SENSOR_HW_FLAGS_HSYNC (1) // horizontal sync polarity.
@@ -103,8 +96,10 @@ typedef struct _sensor {
    uint8_t slv_addr; // Sensor I2C slave address.
    uint32_t hw_flags; // Hardware flags (clock polarities/hw capabilities)

    void *im_filter_args;
    im_filter_t im_filter;
    // Line pre-processing function and args
    void *line_filter_args;
    line_filter_t line_filter_func;

    reset_polarity_t reset_pol; // Reset polarity (TODO move to hw_flags)

    // Sensor state
@@ -159,7 +154,7 @@ int sensor_write_reg(uint8_t reg, uint8_t val);
int sensor_enable_jpeg(bool enable);

// Capture a Snapshot.
int sensor_snapshot(image_t *image);
int sensor_snapshot(image_t *image, line_filter_t line_filter_func, void *line_filter_args);

// Capture the frame buffer.
int sensor_get_fb(image_t *img);
@@ -211,5 +206,5 @@ int sensor_set_vflip(int enable);
int sensor_set_special_effect(sde_t sde);

// Set filter function.
int sensor_set_image_filter(im_filter_t im_filter, void *args);
int sensor_set_line_filter(line_filter_t line_filter_func, void *line_filter_args);
#endif /* __SENSOR_H__ */
usr/examples/04-Image-Filters/line_filter.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# Line Filter Example
#
# The sensor module can perform some basic image processing during the image readout without
# additional overhead. This example shows off how to apply some basic line filters in Python.
#
# WARNING - This feature does not work fast enough on the M4 when line pre-processing is implemented
# in Python. In the future this might be fixed somehow; for now you'll see a partial framebuffer.

import sensor, image, time

# Initialize the camera sensor.
sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.QQVGA)
clock = time.clock() # Tracks FPS.

# Copy source to destination.
# Note: source is YUYV, destination is 1BPP Grayscale.
def line_filter_copy(src, dst):
    for i in range(0, len(dst), 1):
        dst[i] = src[i<<1]

# Segment the image by the following thresholds.
# Note: source is YUYV, destination is 1BPP Grayscale.
def line_filter_bw(src, dst):
    for i in range(0, len(dst), 1):
        if (src[i<<1] > 200 and src[i<<1] < 255):
            dst[i] = 0xFF
        else:
            dst[i] = 0x00

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    lines = 0
    img = sensor.snapshot(line_filter = line_filter_copy) # Take a picture and return the image.
    #print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
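Presumably the second filter defined in the example can be swapped in the same way, with only the keyword argument changing:

img = sensor.snapshot(line_filter = line_filter_bw)   # segment the line instead of copying it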