Add streaming support to snapshot function.
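
The snapshot interface changes from snapshot(sensor, image) to
snapshot(sensor, image, streaming_cb_t cb). Passing NULL for the callback
keeps the old single-shot behavior; passing a callback switches the capture
loop into streaming mode, delivering each frame to the callback until it
returns false. When two frames fit in the raw buffer
(length*2 <= OMV_RAW_BUF_SIZE) the loop double-buffers, handing the previous
frame to the callback while the DMA fills the other half. The standalone
sensor_start_streaming() function is removed and its logic folded into the
default sensor_snapshot().

A minimal sketch of the new calling convention, modeled on the UVC code in
this commit (the callback body and send_frame() transport are illustrative,
not part of the commit):

    // Return true to keep streaming, false to stop the capture loop.
    static bool my_streaming_cb(image_t *image)
    {
        // Hypothetical transport; consume image->pixels here.
        send_frame(image->pixels, image->w * image->h * image->bpp);
        return true;
    }

    image_t image;
    image.pixels = NULL;                               // Filled by the capture loop.
    sensor.snapshot(&sensor, &image, my_streaming_cb); // Streaming mode.
    sensor.snapshot(&sensor, &image, NULL);            // Old single-shot behavior.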

iabdalkader 2018-10-01 03:08:26 +02:00
parent bfaf1d5110
commit 0f1fce6b31
6 changed files with 100 additions and 161 deletions

View File

@@ -288,7 +288,7 @@ static int set_lens_correction(sensor_t *sensor, int enable, int radi, int coef)
     return 0;
 }
 
-static int snapshot(sensor_t *sensor, image_t *image)
+static int snapshot(sensor_t *sensor, image_t *image, streaming_cb_t cb)
 {
     fb_update_jpeg_buffer();

View File

@@ -12,6 +12,7 @@
 #include "mt9v034.h"
 #include "systick.h"
 #include "framebuffer.h"
+#include "sensor.h"
 #include "omv_boardconfig.h"
 #if defined(OMV_ENABLE_MT9V034)
 #define MT9V034_MAX_HEIGHT (480)
@@ -386,9 +387,7 @@ static int set_lens_correction(sensor_t *sensor, int enable, int radi, int coef)
     return 0;
 }
 
-extern int sensor_snapshot(sensor_t *sensor, image_t *image);
-
-static int snapshot(sensor_t *sensor, image_t *image)
+static int snapshot(sensor_t *sensor, image_t *image, streaming_cb_t cb)
 {
     if ((!sensor->pixformat) || (!sensor->framesize) || (MT9V034_mode == MT9V034_NOT_SET)) {
         return -1;
@@ -399,7 +398,7 @@ static int snapshot(sensor_t *sensor, image_t *image)
     image_t new_image;
 
     DCMI_FSIN_HIGH();
-    int ret = sensor_snapshot(sensor, &new_image);
+    int ret = sensor_snapshot(sensor, &new_image, NULL);
     DCMI_FSIN_LOW();
 
     sensor->pixformat = pixformat_bak;

View File

@@ -48,7 +48,7 @@ static mp_obj_t py_sensor_snapshot(uint n_args, const mp_obj_t *args, mp_map_t *
     // Sanity checks
     PY_ASSERT_TRUE_MSG((sensor.pixformat != PIXFORMAT_JPEG), "Operation not supported on JPEG");
 
-    if (sensor.snapshot(&sensor, (image_t*) py_image_cobj(image))==-1) {
+    if (sensor.snapshot(&sensor, (image_t*) py_image_cobj(image), NULL)==-1) {
         nlr_raise(mp_obj_new_exception_msg(&mp_type_RuntimeError, "Sensor Timeout!!"));
         return mp_const_false;
     }
@@ -69,7 +69,7 @@ static mp_obj_t py_sensor_skip_frames(uint n_args, const mp_obj_t *args, mp_map_
     if (!n_args) {
         while ((systick_current_millis() - millis) < time) { // 32-bit math handles wrap arrounds...
-            if (sensor.snapshot(&sensor, NULL) == -1) {
+            if (sensor.snapshot(&sensor, NULL, NULL) == -1) {
                 nlr_raise(mp_obj_new_exception_msg(&mp_type_RuntimeError, "Sensor Timeout!!"));
             }
         }
@@ -79,7 +79,7 @@ static mp_obj_t py_sensor_skip_frames(uint n_args, const mp_obj_t *args, mp_map_
             break;
         }
-        if (sensor.snapshot(&sensor, NULL) == -1) {
+        if (sensor.snapshot(&sensor, NULL, NULL) == -1) {
            nlr_raise(mp_obj_new_exception_msg(&mp_type_RuntimeError, "Sensor Timeout!!"));
        }
    }

View File

@@ -23,7 +23,7 @@
 #define OV_CHIP_ID      (0x0A)
 #define ON_CHIP_ID      (0x00)
-#define MAX_XFER_SIZE   (0xFFFC)
+#define MAX_XFER_SIZE   (0xFFFC*4)
 
 sensor_t sensor = {0};
 TIM_HandleTypeDef TIMHandle = {0};
@@ -811,8 +811,11 @@ void DCMI_DMAConvCpltUser(uint32_t addr)
 
 // This is the default snapshot function, which can be replaced in sensor_init functions. This function
 // uses the DCMI and DMA to capture frames and each line is processed in the DCMI_DMAConvCpltUser function.
-int sensor_snapshot(sensor_t *sensor, image_t *image)
+int sensor_snapshot(sensor_t *sensor, image_t *image, streaming_cb_t streaming_cb)
 {
+    uint32_t frame = 0;
+    bool streaming = (streaming_cb != NULL); // Streaming mode.
+    bool doublebuf = false; // Use double buffers in streaming mode.
     uint32_t addr, length, tick_start;
 
     // Compress the framebuffer for the IDE preview, only if it's not the first frame,
@@ -824,6 +827,10 @@ int sensor_snapshot(sensor_t *sensor, image_t *image)
     // the format is set to GS, otherwise the pixel format will be swicthed to BAYER.
     sensor_check_buffsize();
 
+    // Set the current frame buffer target used in the DMA line callback
+    // (DCMI_DMAConvCpltUser function), in both snapshot and streaming modes.
+    dest_fb = MAIN_FB()->pixels;
+
     // The user may have changed the MAIN_FB width or height on the last image so we need
     // to restore that here. We don't have to restore bpp because that's taken care of
     // already in the code below. Note that we do the JPEG compression above first to save
@@ -841,17 +848,17 @@ int sensor_snapshot(sensor_t *sensor, image_t *image)
         case PIXFORMAT_RGB565:
         case PIXFORMAT_YUV422:
             // RGB/YUV read 2 bytes per pixel.
-            length = (w * h * 2)/4;
+            length = (w * h * 2);
             addr = (uint32_t) &_line_buf;
             break;
         case PIXFORMAT_BAYER:
             // BAYER/RAW: 1 byte per pixel
-            length = (w * h * 1)/4;
+            length = (w * h * 1);
             addr = (uint32_t) &_line_buf;
             break;
         case PIXFORMAT_GRAYSCALE:
             // 1/2BPP Grayscale.
-            length = (w * h * sensor->gs_bpp)/4;
+            length = (w * h * sensor->gs_bpp);
             addr = (uint32_t) &_line_buf;
             break;
         case PIXFORMAT_JPEG:
@@ -863,6 +870,14 @@ int sensor_snapshot(sensor_t *sensor, image_t *image)
             return -1;
     }
 
+    if (streaming_cb) {
+        image->pixels = NULL;
+    }
+
+    // If two frames fit in ram, use double buffering in streaming mode.
+    doublebuf = ((length*2) <= OMV_RAW_BUF_SIZE);
+
+    do {
     // Clear line counter
     line = 0;
@@ -872,17 +887,20 @@ int sensor_snapshot(sensor_t *sensor, image_t *image)
     // Enable DMA IRQ
     HAL_NVIC_EnableIRQ(DMA2_Stream1_IRQn);
 
-    // Set the frameb buffer used by the line processing function.
-    dest_fb = MAIN_FB()->pixels;
-
     if (sensor->pixformat == PIXFORMAT_JPEG) {
         // Start a regular transfer
         HAL_DCMI_Start_DMA(&DCMIHandle,
-                DCMI_MODE_SNAPSHOT, addr, length);
+                DCMI_MODE_SNAPSHOT, addr, length/4);
     } else {
         // Start a multibuffer transfer (line by line)
         HAL_DCMI_Start_DMA_MB(&DCMIHandle,
-                DCMI_MODE_SNAPSHOT, addr, length, h);
+                DCMI_MODE_SNAPSHOT, addr, length/4, h);
+    }
+
+    if (streaming_cb && doublebuf && image->pixels != NULL) {
+        // Call streaming callback function with previous frame.
+        // Note: Image pointer should Not be NULL in streaming mode.
+        streaming = streaming_cb(image);
     }
 
     // Wait for frame
@@ -932,116 +950,29 @@ int sensor_snapshot(sensor_t *sensor, image_t *image)
         image->h = MAIN_FB()->h;
         image->bpp = MAIN_FB()->bpp;
         image->pixels = MAIN_FB()->pixels;
-    }
-
-    return 0;
-}
-
-int sensor_start_streaming(sensor_t *sensor, streaming_cb_t streaming_cb)
-{
-    uint32_t frame = 0;
-    bool streaming = true;
-    bool doublebuf = false;
-    uint32_t length, tick_start;
-    image_t image;
-
-    image.w = MAIN_FB()->w;
-    image.h = MAIN_FB()->h;
-    image.pixels = NULL;
-
-    // Setup the size and address of the transfer
-    switch (sensor->pixformat) {
-        case PIXFORMAT_RGB565:
-        case PIXFORMAT_YUV422:
-            // RGB/YUV read 2 bytes per pixel.
-            image.bpp = 2;
-            length = (image.w * image.h * 2);
-            break;
-        case PIXFORMAT_BAYER:
-            // BAYER/RAW: 1 byte per pixel
-            // TODO: Note BAYER is not supported by UVC yet.
-            image.bpp = 3;
-            length = (image.w * image.h * 1);
-            break;
-        case PIXFORMAT_GRAYSCALE:
-            // 1/2BPP Grayscale.
-            image.bpp = 1;
-            length = (image.w * image.h * sensor->gs_bpp);
-            break;
-        default:
-            return -1;
-    }
-
-    dest_fb = MAIN_FB()->pixels;
-    doublebuf = ((length*2) <= OMV_RAW_BUF_SIZE);
-
-    while (streaming) {
-        // Clear line counter
-        line = 0;
-
-        // Snapshot start tick
-        tick_start = HAL_GetTick();
-
-        // Enable DMA IRQ
-        HAL_NVIC_EnableIRQ(DMA2_Stream1_IRQn);
-
-        // Start a multibuffer transfer (line by line).
-        HAL_DCMI_Start_DMA_MB(&DCMIHandle,
-                DCMI_MODE_SNAPSHOT, (uint32_t) &_line_buf, length/4, image.h);
-
-        if (doublebuf) {
-            // Call streaming function with previous frame.
-            if (image.pixels != NULL) {
-                streaming = streaming_cb(&image);
-            }
-        }
-
-        // Wait for current frame
-        while ((DCMI->CR & DCMI_CR_CAPTURE) != 0) {
-            // Wait for interrupt
-            __WFI();
-
-            if ((HAL_GetTick() - tick_start) >= 3000) {
-                // Sensor timeout, most likely a HW issue.
-                // Abort the DMA request.
-                HAL_DMA_Abort(&DMAHandle);
-                return -1;
-            }
-        }
-
-        // Abort DMA transfer.
-        // Note: In JPEG mode the DMA will still be waiting for data since
-        // the max frame size is set, so we need to abort the DMA transfer.
-        HAL_DMA_Abort(&DMAHandle);
-
-        // Disable DMA IRQ
-        HAL_NVIC_DisableIRQ(DMA2_Stream1_IRQn);
-
-        if (doublebuf) {
-            if (frame == 0) {
-                image.pixels = MAIN_FB()->pixels;
-                // Next frame will be transfered to the second half.
-                dest_fb = MAIN_FB()->pixels + length;
-            } else {
-                image.pixels = MAIN_FB()->pixels + length;
-                // Next frame will be transfered to the first half.
-                dest_fb = MAIN_FB()->pixels;
-            }
-
-            #if __DCACHE_PRESENT == 1
-            #define CLEANINVALIDATE_DCACHE(addr, size) \
-                (SCB_CleanInvalidateDCache_by_Addr((uint32_t*)((uint32_t)addr & ~0x1f), \
-                ((uint32_t)((uint8_t*)addr + size + 0x1f) & ~0x1f) - ((uint32_t)addr & ~0x1f)))
-            // Not using linebuf, need to invalidate the cache.
-            CLEANINVALIDATE_DCACHE(image.pixels, length);
-            #endif
-
-            // Switch frame buffer.
-            frame ^= 1;
-        } else {
-            image.pixels = MAIN_FB()->pixels;
-            streaming = streaming_cb(&image);
-        }
-    }
+
+        if (streaming_cb) {
+            // In streaming mode, either switch frame buffers in double buffer mode,
+            // or call the streaming callback with the main FB in single buffer mode.
+            if (doublebuf == false) {
+                // In single buffer mode, call streaming callback.
+                streaming = streaming_cb(image);
+            } else {
+                // In double buffer mode, switch frame buffers.
+                if (frame == 0) {
+                    image->pixels = MAIN_FB()->pixels;
+                    // Next frame will be transfered to the second half.
+                    dest_fb = MAIN_FB()->pixels + length;
+                } else {
+                    image->pixels = MAIN_FB()->pixels + length;
+                    // Next frame will be transfered to the first half.
+                    dest_fb = MAIN_FB()->pixels;
+                }
+                frame ^= 1; // Switch frame buffers.
+            }
+        }
+    }
+    } while (streaming == true);
 
     return 0;
 }
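
Note on the length arithmetic above: length is now computed in bytes for
every format and divided by 4 only at the HAL_DCMI_Start_DMA*() call sites,
since the DCMI DMA transfer count is in 32-bit words (presumably also why
MAX_XFER_SIZE becomes 0xFFFC*4). Keeping byte units lets the same value
drive the length*2 <= OMV_RAW_BUF_SIZE double-buffer check and the
MAIN_FB()->pixels + length offset arithmetic. A tiny self-contained
illustration of the ping-pong indexing used by the streaming loop (names
and sizes are illustrative, not from the commit):

    #include <stdint.h>
    #include <stdio.h>

    #define FRAME_LEN 4                     // Bytes per frame (tiny, for illustration).
    static uint8_t raw_buf[FRAME_LEN * 2];  // Two frame halves, as in MAIN_FB().

    int main(void)
    {
        uint8_t *dest_fb = raw_buf;         // Half the "DMA" fills next.
        uint32_t frame = 0;
        for (int i = 0; i < 4; i++) {
            uint8_t *consume;               // Half the callback would read from.
            if (frame == 0) {
                consume = raw_buf;              // Consume the first half...
                dest_fb = raw_buf + FRAME_LEN;  // ...while filling the second.
            } else {
                consume = raw_buf + FRAME_LEN;  // Consume the second half...
                dest_fb = raw_buf;              // ...while filling the first.
            }
            frame ^= 1;                     // Switch halves each iteration.
            printf("iter %d: consume +%td, fill +%td\n",
                   i, consume - raw_buf, dest_fb - raw_buf);
        }
        return 0;
    }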

View File

@@ -149,7 +149,7 @@ typedef struct _sensor {
     int  (*set_vflip)           (sensor_t *sensor, int enable);
     int  (*set_special_effect)  (sensor_t *sensor, sde_t sde);
     int  (*set_lens_correction) (sensor_t *sensor, int enable, int radi, int coef);
-    int  (*snapshot)            (sensor_t *sensor, image_t *image);
+    int  (*snapshot)            (sensor_t *sensor, image_t *image, streaming_cb_t streaming_cb);
 } sensor_t;
 
 // Resolution table
@@ -244,7 +244,5 @@ int sensor_set_lens_correction(int enable, int radi, int coef);
 int sensor_set_vsync_output(GPIO_TypeDef *gpio, uint32_t pin);
 // Default snapshot function.
-int sensor_snapshot(sensor_t *sensor, image_t *image);
-int sensor_start_streaming(sensor_t *sensor, streaming_cb_t cb);
+int sensor_snapshot(sensor_t *sensor, image_t *image, streaming_cb_t streaming_cb);
 
 #endif /* __SENSOR_H__ */

View File

@@ -89,6 +89,10 @@ bool streaming_cb(image_t *image)
         }
     }
 
+    // TODO: This is a hack for old snapshot functions that don't support
+    // the streaming mode yet, we call the streaming function explicitly.
+    image->pixels = NULL;
+
     if (g_uvc_stream_status != 2 ||
             frame_index != videoCommitControl.bFrameIndex ||
             format_index != videoCommitControl.bFormatIndex) {
@@ -174,7 +178,14 @@ int main()
                 format_index = videoCommitControl.bFormatIndex;
             }
 
-            sensor_start_streaming(&sensor, streaming_cb);
+            image_t image;
+            image.pixels = NULL;
+            sensor.snapshot(&sensor, &image, streaming_cb);
+
+            if (image.pixels) {
+                // TODO: This is a hack for old snapshot functions that don't support
+                // the streaming mode yet, we call the streaming function explicitly.
+                streaming_cb(&image);
+            }
         }
     }
 }
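
The UVC changes keep old sensor drivers working: main() clears image.pixels
before calling snapshot(), and streaming_cb() clears image->pixels after
handling a frame. A streaming-aware snapshot() therefore returns with
image.pixels still NULL, while an old-style snapshot() ignores the callback
and leaves a filled frame behind, which main() then pushes through
streaming_cb() explicitly (the TODO hack noted in the diff).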