Mirror of https://github.com/openmv/openmv.git (synced 2025-11-04 14:49:50 +08:00)
Merge pull request #123 from kwagyeman/master
Refactor some example scripts.
Commit fe36dd8ff4
17  usr/examples/02-Board-Control/dac_control.py  Normal file
@@ -0,0 +1,17 @@
# DAC Control Example
#
# This example shows how to use the DAC pin output onboard your OpenMV Cam.

import time
from pyb import DAC

dac = DAC("P6") # Must always be "P6".

while(True):
    # The DAC has 8-12 bits of resolution (default 8-bits).
    for i in range(256):
        dac.write(i)
        time.sleep(20)
    for i in range(256):
        dac.write(255-i)
        time.sleep(20)
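For reference, a minimal sketch (not part of this commit, assuming the standard MicroPython pyb.DAC API) of producing a repeating waveform with a timed buffer instead of a software loop:

import math
from pyb import DAC

# Hypothetical illustration: an 8-bit sine table replayed by the DAC's timer.
buf = bytearray(int(127 + 127 * math.sin(2 * math.pi * i / 128)) for i in range(128))

dac = DAC("P6")                          # DAC output pin on the OpenMV Cam.
dac.write_timed(buf, 400 * len(buf),     # Sample rate in Hz for the whole table.
                mode=DAC.CIRCULAR)       # Replay the buffer continuously.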
19  usr/examples/02-Board-Control/i2c_control.py  Normal file
@@ -0,0 +1,19 @@
# I2C Control
#
# This example shows how to use the i2c bus on your OpenMV Cam by dumping the
# contents of a standard EEPROM. To run this example either connect the
# Thermopile Shield to your OpenMV Cam or an I2C EEPROM to your OpenMV Cam.

from pyb import I2C

i2c = I2C(2, I2C.MASTER) # The i2c bus must always be 2.
mem = i2c.mem_read(256, 0x50, 0) # The eeprom slave address is 0x50.

print("\n[")
for i in range(16):
    print("\t[", end='')
    for j in range(16):
        print("%03d" % mem[(i*16)+j], end='')
        if j != 15: print(", ", end='')
    print("]," if i != 15 else "]")
print("]")
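A minimal companion sketch (not part of this commit, assuming the standard pyb.I2C API) showing how a byte could be written back to the same EEPROM and then re-read:

import pyb
from pyb import I2C

i2c = I2C(2, I2C.MASTER)              # I2C bus 2, as in the example above.

# Hypothetical illustration: write one byte at memory address 0, wait out the
# EEPROM's internal write cycle, then read it back.
i2c.mem_write(0xA5, 0x50, 0)          # value, slave address, memory address
pyb.delay(10)                         # Typical EEPROM write cycles take a few ms.
print("0x%02X" % i2c.mem_read(1, 0x50, 0)[0])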
@@ -1,132 +0,0 @@
# Copy this module to storage and import it if you want
# to use it in your own scripts. See example usage below.
from time import sleep
from pyb import Pin, SPI

class LCD:
    def reset(self):
        self.rst.low()
        sleep(100)
        self.rst.high()
        sleep(100)

    def __write_data(self, c):
        self.cs.low()
        self.rs.high()
        self.spi.send(c)
        self.cs.high()

    def __write_command(self, c):
        self.cs.low()
        self.rs.low()
        self.spi.send(c)
        self.cs.high()

    def write_command(self, cmd, *data):
        self.__write_command(cmd)
        for a in data:
            self.__write_data(a)

    def write_image(self, image):
        self.write_command(0x2C)
        self.cs.low()
        self.rs.high()
        self.spi.send(image)
        self.cs.high()

    def clear(self, c=0x00):
        self.write_command(0x2C)
        for i in range(128*160):
            self.__write_data(c)
            self.__write_data(c)

    def set_backlight(self, on):
        if (on):
            self.bl.high()
        else:
            self.bl.low()

    def __init__(self, madctl=0xC0):
        self.rst = Pin('P7', Pin.OUT_PP, Pin.PULL_UP)
        self.rs = Pin('P8', Pin.OUT_PP, Pin.PULL_UP)
        self.cs = Pin('P3', Pin.OUT_PP, Pin.PULL_UP)
        self.bl = Pin('P6', Pin.OUT_PP, Pin.PULL_UP)
        self.spi = SPI(2, SPI.MASTER, baudrate=22500000, polarity=0, phase=0)

        # LCD init sequence
        self.reset() # uHW reset
        self.write_command(0x11) # Sleep exit
        sleep(120)

        # ST7735R Frame Rate
        self.write_command(0xB1, 0x01, 0x2C, 0x2D)
        self.write_command(0xB2, 0x01, 0x2C, 0x2D)
        self.write_command(0xB3, 0x01, 0x2C, 0x2D, 0x01, 0x2C, 0x2D)

        # Column inversion
        self.write_command(0xB4, 0x07)

        # ST7735R Power Sequence
        self.write_command(0xC0, 0xA2, 0x02, 0x84)
        self.write_command(0xC1, 0xC5)
        self.write_command(0xC2, 0x0A, 0x00)
        self.write_command(0xC3, 0x8A, 0x2A)
        self.write_command(0xC4, 0x8A, 0xEE)

        # VCOM
        self.write_command(0xC5, 0x0E)

        # MX, MY, MV, RGB mode
        self.write_command(0x36, madctl)

        # ST7735R Gamma Sequence
        self.write_command(0xe0, 0x0f, 0x1a, 0x0f, 0x18, 0x2f, 0x28, 0x20,
                           0x22, 0x1f, 0x1b, 0x23, 0x37, 0x00, 0x07, 0x02, 0x10)
        self.write_command(0xe1, 0x0f, 0x1b, 0x0f, 0x17, 0x33, 0x2c, 0x29,
                           0x2e, 0x30, 0x30, 0x39, 0x3f, 0x00, 0x07, 0x03, 0x10)

        # Set column address
        self.write_command(0x2a, 0x00, 0x00, 0x00, 128-1)

        # Set row address
        self.write_command(0x2b, 0x00, 0x00, 0x00, 160-1)

        # Enable test command
        self.write_command(0xF0, 0x01)

        # Disable ram power save mode
        self.write_command(0xF6, 0x00)

        # 65k mode
        self.write_command(0x3A, 0x05)

        # Display on
        self.write_command(0x29)

if __name__ == "__main__":
    import sensor, time
    #from lcd import LCD

    # Reset sensor
    sensor.reset()

    # Sensor settings
    sensor.set_contrast(2)
    sensor.set_brightness(0)
    sensor.set_saturation(2)
    sensor.set_pixformat(sensor.RGB565)

    # LCD resolution (128x160)
    sensor.set_framesize(sensor.QQVGA2)

    # Init LCD
    lcd = LCD()
    lcd.clear(0x00)
    lcd.set_backlight(True)

    clock = time.clock()
    while (True):
        clock.tick()
        # Capture a frame and draw it to the LCD
        lcd.write_image(sensor.snapshot())
        print(clock.fps())
27  usr/examples/02-Board-Control/led_control.py  Normal file
@@ -0,0 +1,27 @@
# LED Control Example
#
# This example shows how to control your OpenMV Cam's built-in LEDs. Use your
# smart phone's camera to see the IR LEDs.

import time
from pyb import LED

red_led = LED(1)
green_led = LED(2)
blue_led = LED(3)
ir_led = LED(4)

def led_control(x):
    if (x&1)==0: red_led.off()
    elif (x&1)==1: red_led.on()
    if (x&2)==0: green_led.off()
    elif (x&2)==2: green_led.on()
    if (x&4)==0: blue_led.off()
    elif (x&4)==4: blue_led.on()
    if (x&8)==0: ir_led.off()
    elif (x&8)==8: ir_led.on()

while(True):
    for i in range(16):
        led_control(i)
        time.sleep(500)
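In the script above, led_control() treats its argument as a 4-bit mask (bit 0 = red, bit 1 = green, bit 2 = blue, bit 3 = IR), so the main loop simply counts through every LED combination. A quick usage sketch (not part of this commit):

led_control(0b0101)   # Red and blue on, green and IR off.
led_control(0)        # Everything off.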
13  usr/examples/02-Board-Control/pin_control.py  Normal file
@@ -0,0 +1,13 @@
# Pin Control Example
#
# This example shows how to use the I/O pins in GPIO mode on your OpenMV Cam.

from pyb import Pin

# Connect a switch to pin 0 that will pull it low when the switch is closed.
# Pin 1 will then light up.
pin0 = Pin('P0', Pin.IN, Pin.PULL_UP)
pin1 = Pin('P1', Pin.OUT_PP, Pin.PULL_NONE)

while(True):
    pin1.value(not pin0.value())
13  usr/examples/02-Board-Control/read_adc.py  Normal file
@@ -0,0 +1,13 @@
# Read ADC Example
#
# This example shows how to read the ADC on your OpenMV Cam.

import time
from pyb import ADC

adc = ADC("P6") # Must always be "P6".

while(True):
    # The ADC has 12-bits of resolution for 4096 values.
    print("ADC = %fv" % ((adc.read() * 3.3) / 4095))
    time.sleep(100)
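As a quick check of the scaling used above (plain arithmetic, not part of the commit), a mid-scale reading maps to roughly half of the 3.3 V reference:

# Hypothetical raw reading used only to illustrate the conversion above.
raw = 2048                      # 12-bit counts, range 0..4095
volts = (raw * 3.3) / 4095      # ~1.65 V
print("ADC = %fv" % volts)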
19  usr/examples/02-Board-Control/servo_control.py  Normal file
@@ -0,0 +1,19 @@
# Servo Control Example
#
# This example shows how to use your OpenMV Cam to control servos.

import time
from pyb import Servo

s1 = Servo(1) # P7
s2 = Servo(2) # P8

while(True):
    for i in range(1000):
        s1.pulse_width(1000 + i)
        s2.pulse_width(1999 - i)
        time.sleep(10)
    for i in range(1000):
        s1.pulse_width(1999 - i)
        s2.pulse_width(1000 + i)
        time.sleep(10)
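The sweep above moves both servos in opposite directions across the 1000-1999 pulse-width range used by typical RC servos. A minimal sketch (not part of this commit, assuming the standard pyb.Servo API) of the angle-based interface instead of raw pulse widths:

from pyb import Servo

s1 = Servo(1)        # P7, as in the example above.
s1.angle(0)          # Center the servo.
s1.angle(45, 1000)   # Move to +45 degrees over 1000 ms.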
71  usr/examples/02-Board-Control/spi_control.py  Normal file
@@ -0,0 +1,71 @@
# SPI Control
#
# This example shows how to use the SPI bus on your OpenMV Cam to directly
# drive the LCD shield without using the built-in lcd shield driver. You will
# need the LCD shield to run this example.

import sensor, image, time
from pyb import Pin, SPI

cs = Pin("P3", Pin.OUT_OD)
rst = Pin("P7", Pin.OUT_PP)
rs = Pin("P8", Pin.OUT_PP)
# The hardware SPI bus for your OpenMV Cam is always SPI bus 2.
spi = SPI(2, SPI.MASTER, baudrate=int(1000000000/66), polarity=0, phase=0)

def write_command_byte(c):
    cs.low()
    rs.low()
    spi.send(c)
    cs.high()

def write_data_byte(c):
    cs.low()
    rs.high()
    spi.send(c)
    cs.high()

def write_command(c, *data):
    write_command_byte(c)
    if data:
        for d in data: write_data_byte(d)

def write_image(img):
    cs.low()
    rs.high()
    spi.send(img)
    cs.high()

# Reset the LCD.
rst.low()
time.sleep(100)
rst.high()
time.sleep(100)

write_command(0x11) # Sleep Exit
time.sleep(120)

# Memory Data Access Control
write_command(0x36, 0xC0)

# Interface Pixel Format
write_command(0x3A, 0x05)

# Display On
write_command(0x29)

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # must be this
sensor.set_framesize(sensor.QQVGA2) # must be this
sensor.skip_frames(10) # Let new settings take effect.
clock = time.clock() # Tracks FPS.

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    write_command(0x2C) # Write image command...
    write_image(img)

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
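One note on the SPI clock requested above (plain arithmetic, not part of the commit): the expression int(1000000000/66) works out to roughly 15 MHz.

print(int(1000000000 / 66))   # 15151515, i.e. about 15.15 MHz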
17  usr/examples/02-Board-Control/uart_control.py  Normal file
@@ -0,0 +1,17 @@
# UART Control
#
# This example shows how to use the serial port on your OpenMV Cam. Attach pin
# P4 to the serial input of a serial LCD screen to see "Hello World!" printed
# on the serial LCD display.

import time
from pyb import UART

# Always pass UART 3 for the UART number for your OpenMV Cam.
# The second argument is the UART baud rate. For a more advanced UART control
# example see the BLE-Shield driver.
uart = UART(3, 19200)

while(True):
    uart.write("Hello World!\r")
    time.sleep(1000)
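A minimal companion sketch (not part of this commit, assuming the standard pyb.UART API) that echoes received bytes back out the same port:

import time
from pyb import UART

uart = UART(3, 19200)            # Same UART and baud rate as the example above.

while True:
    if uart.any():               # Any bytes waiting in the receive buffer?
        uart.write(uart.read())  # Echo them back.
    time.sleep(10)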
@@ -1,9 +1,19 @@
+# Color Drawing Example
+#
+# This example shows off your OpenMV Cam's built-in drawing capabilities. This
+# example was originally a test but serves as good reference code. Please put
+# your IDE into non-JPEG mode to see the best drawing quality.
+
 import sensor, image, time
 
 sensor.reset()
 sensor.set_framesize(sensor.QVGA)
 
 # All drawing functions use the same code to pass color.
 # So we just need to test one function.
 
 while(True):
 
     # Test Draw Line (GRAYSCALE)
     sensor.set_pixformat(sensor.GRAYSCALE)
     for i in range(10):
@@ -13,6 +23,7 @@ while(True):
         img.draw_line([i, 0, i, img.height()-1], color = int(c))
         sensor.snapshot()
         time.sleep(1000)
+
     # Test Draw Line (RGB565)
     sensor.set_pixformat(sensor.RGB565)
     for i in range(10):
@@ -22,6 +33,7 @@ while(True):
         img.draw_line([i, 0, i, img.height()-1], color = [int(c), 0, 0])
         sensor.snapshot()
         time.sleep(1000)
+
     # Test Draw Line (RGB565)
     sensor.set_pixformat(sensor.RGB565)
     for i in range(10):
@@ -31,6 +43,7 @@ while(True):
         img.draw_line([i, 0, i, img.height()-1], color = [0, int(c), 0])
         sensor.snapshot()
         time.sleep(1000)
+
     # Test Draw Line (RGB565)
     sensor.set_pixformat(sensor.RGB565)
     for i in range(10):
@@ -1,7 +1,16 @@
+# Crazy Drawing Example
+#
+# This example shows off your OpenMV Cam's built-in drawing capabilities. This
+# example was originally a test but serves as good reference code. Please put
+# your IDE into non-JPEG mode to see the best drawing quality.
+
 import pyb, sensor, image, math
 
 sensor.reset()
 sensor.set_framesize(sensor.QVGA)
 
 while(True):
 
     # Test Set Pixel
     sensor.set_pixformat(sensor.GRAYSCALE)
     for i in range(10):
@@ -17,6 +26,7 @@ while(True):
         x = (pyb.rng() % (2*img.width())) - (img.width()//2)
         y = (pyb.rng() % (2*img.height())) - (img.height()//2)
         img.set_pixel(x, y, (255, 255, 255))
+
     # Test Draw Line
     sensor.set_pixformat(sensor.GRAYSCALE)
     for i in range(10):
@@ -36,6 +46,7 @@ while(True):
         x1 = (pyb.rng() % (2*img.width())) - (img.width()//2)
         y1 = (pyb.rng() % (2*img.height())) - (img.height()//2)
         img.draw_line([x0, y0, x1, y1])
+
     # Test Draw Rectangle
     sensor.set_pixformat(sensor.GRAYSCALE)
     for i in range(10):
@@ -72,6 +83,7 @@ while(True):
         y = (pyb.rng() % (2*img.height())) - (img.height()//2)
         r = (pyb.rng() % (img.width() if (img.width() > img.height()) else img.height()))
         img.draw_circle(x, y, r)
+
     # Test Draw String
     sensor.set_pixformat(sensor.GRAYSCALE)
     for i in range(10):
@@ -87,6 +99,7 @@ while(True):
         x = (pyb.rng() % (2*img.width())) - (img.width()//2)
         y = (pyb.rng() % (2*img.height())) - (img.height()//2)
         img.draw_string(x, y, "Hello\nWorld!")
+
     # Test Draw Cross
     sensor.set_pixformat(sensor.GRAYSCALE)
     for i in range(10):
@@ -102,6 +115,7 @@ while(True):
         x = (pyb.rng() % (2*img.width())) - (img.width()//2)
         y = (pyb.rng() % (2*img.height())) - (img.height()//2)
         img.draw_cross(x, y)
+
     # Test Draw Keypoints
     sensor.set_pixformat(sensor.GRAYSCALE)
     for i in range(10):
@@ -1,27 +0,0 @@
import sensor
from math import sin, cos

sensor.reset()
# Set sensor settings
sensor.set_brightness(0)
sensor.set_saturation(0)
sensor.set_gainceiling(8)
sensor.set_contrast(2)

# Set sensor pixel format
sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.GRAYSCALE)

# Skip a few frames to allow the sensor settle down
for i in range(0, 3):
    image = sensor.snapshot()

x = int(320/2)
y = int(240/2)
image.draw_circle(x, y, 50)
image.draw_line((x, y, int(50*sin(45))+x, int(50*cos(45))+y))
image.draw_rectangle((x-60, y-60, 120, 120))
image.draw_string(10, 10, "OpenMV", color = 0xFF)
image.draw_string(10, 25, "Hello World", color = 0xFF)
# Flush buffer...
sensor.snapshot()
@@ -1,29 +0,0 @@
import sensor, time

sensor.reset()
# Set sensor settings
sensor.set_brightness(0)
sensor.set_saturation(0)
sensor.set_gainceiling(8)
sensor.set_contrast(2)

# Set sensor pixel format
sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.GRAYSCALE)

# Capture image and set pixels
image = sensor.snapshot()
for y in range(0, 240):
    for x in range(0, 320):
        image.set_pixel(x, y, 0xFF)

time.sleep(1000)

# Switch to RGB565
sensor.set_pixformat(sensor.RGB565)

# Capture image and set pixels
image = sensor.snapshot()
for y in range(0, 240):
    for x in range(0, 320):
        image.set_pixel(x, y, (0xFF, 0x00, 0x00))
49  usr/examples/04-Image-Filters/advanced_frame_differencing.py  Normal file
@@ -0,0 +1,49 @@
# Advanced Frame Differencing Example
#
# Note: You will need an SD card to run this example.
#
# This example demonstrates using frame differencing with your OpenMV Cam. This
# example is advanced because it performs a background update to deal with the
# background image changing over time.

import sensor, image, pyb, os, time

BG_UPDATE_FRAMES = 50 # How many frames before blending.
BG_UPDATE_BLEND = 128 # How much to blend by... ([0-256]==[0.0-1.0]).

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.
sensor.set_whitebal(False) # Turn off white balance.
clock = time.clock() # Tracks FPS.

if not "temp" in os.listdir(): os.mkdir("temp") # Make a temp directory

print("About to save background image...")
sensor.skip_frames(60) # Give the user time to get ready.
sensor.snapshot().save("temp/bg.bmp")
print("Saved background image - Now frame differencing!")

frame_count = 0
while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    frame_count += 1
    if frame_count > BG_UPDATE_FRAMES:
        frame_count = 0
        # Blend in new frame. We're doing 256-alpha here because we want to
        # blend the new frame into the background. Not the background into the
        # new frame which would be just alpha. Blend replaces each pixel by
        # ((NEW*(alpha))+(OLD*(256-alpha)))/256. So, a low alpha results in
        # low blending of the new image while a high alpha results in high
        # blending of the new image. We need to reverse that for this update.
        img.blend("temp/bg.bmp", alpha=(256-BG_UPDATE_BLEND))
        img.save("temp/bg.bmp")

    # Replace the image with the "abs(NEW-OLD)" frame difference.
    img.difference("temp/bg.bmp")

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
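A quick worked check of the blend rule quoted in the comments above (plain arithmetic, not part of the commit). With BG_UPDATE_BLEND = 128 the call passes alpha = 256 - 128 = 128, so each background pixel ends up as an equal mix of the old and new values:

# Hypothetical pixel values, just to illustrate ((NEW*alpha)+(OLD*(256-alpha)))/256.
NEW, OLD = 200, 100
for alpha in (64, 128, 192):
    print(alpha, ((NEW * alpha) + (OLD * (256 - alpha))) // 256)   # 125, 150, 175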
33  usr/examples/04-Image-Filters/basic_frame_differencing.py  Normal file
@@ -0,0 +1,33 @@
# Basic Frame Differencing Example
#
# Note: You will need an SD card to run this example.
#
# This example demonstrates using frame differencing with your OpenMV Cam. It's
# called basic frame differencing because there's no background image update.
# So, as time passes the background image may change resulting in issues.

import sensor, image, pyb, os, time

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.
sensor.set_whitebal(False) # Turn off white balance.
clock = time.clock() # Tracks FPS.

if not "temp" in os.listdir(): os.mkdir("temp") # Make a temp directory

print("About to save background image...")
sensor.skip_frames(60) # Give the user time to get ready.
sensor.snapshot().save("temp/bg.bmp")
print("Saved background image - Now frame differencing!")

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    # Replace the image with the "abs(NEW-OLD)" frame difference.
    img.difference("temp/bg.bmp")

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
@@ -3,21 +3,31 @@
 # This example demonstrates using the morph function on an image to do edge
 # detection and then thresholding and filtering that image afterwards.
 
-import sensor, image
+import sensor, image, time
 
 kernel_size = 1 # kernel width = (size*2)+1, kernel height = (size*2)+1
 kernel = [-1, -1, -1,\
           -1, +8, -1,\
           -1, -1, -1]
-# This is a high pass filter kernel. ee here for more kernels:
+# This is a high pass filter kernel. see here for more kernels:
 # http://www.fmwconcepts.com/imagemagick/digital_image_filtering.pdf
 thresholds = [(100, 255)] # grayscale thresholds
 
-sensor.reset()
-sensor.set_framesize(sensor.QQVGA) # smaller resolution to go faster
-sensor.set_pixformat(sensor.GRAYSCALE)
+sensor.reset() # Initialize the camera sensor.
+sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
+sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
+sensor.skip_frames(10) # Let new settings take effect.
+clock = time.clock() # Tracks FPS.
 
 while(True):
-    img = sensor.snapshot()
+    clock.tick() # Track elapsed milliseconds between snapshots().
+    img = sensor.snapshot() # Take a picture and return the image.
 
     img.morph(kernel_size, kernel)
     img.binary(thresholds)
-    img.erode(1, threshold = 2) # erode pixels with less than 2 neighbors
+
+    # Erode pixels with less than 2 neighbors using a 3x3 image kernel
+    img.erode(1, threshold = 2)
+
+    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
+    # connected to your computer. The FPS should increase once disconnected.
@@ -1,30 +0,0 @@
# Filters are image functions that process a single line at a time.
# Since filters process lines on the fly, they run at sensor speed.
# Note: Only one filter can be enabled at a time.
import time, sensor

# Reset sensor
sensor.reset()

# Set sensor settings
sensor.set_contrast(1)
sensor.set_brightness(3)
sensor.set_saturation(3)
sensor.set_gainceiling(16)
sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.GRAYSCALE)

# Enable BW filter
sensor.set_image_filter(sensor.FILTER_BW, lower=200, upper=255)

# Enable SKIN filter (Note doesn't work very well on RGB)
#sensor.set_image_filter(sensor.FILTER_SKIN)

# FPS clock
clock = time.clock()
while (True):
    clock.tick()
    img = sensor.snapshot()
    # Draw FPS
    # Note: Actual FPS is higher, the IDE slows down streaming.
    img.draw_string(0, 0, "FPS:%.2f"%(clock.fps()))
25  usr/examples/04-Image-Filters/mean_filter.py  Normal file
@@ -0,0 +1,25 @@
# Mean Filter Example
#
# This example shows off mean filtering. Mean filtering is your standard average
# filter in an NxN neighborhood. Mean filtering removes noise in the image by
# blurring everything. But, it's the fastest kernel filter operation.

import sensor, image, time

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.
clock = time.clock() # Tracks FPS.

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    # The only argument is the kernel size. N corresponds to a ((N*2)+1)^2
    # kernel size. E.g. 1 == 3x3 kernel, 2 == 5x5 kernel, etc. Note: You
    # shouldn't ever need to use a value bigger than 2.
    img.mean(1)

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
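A quick check of the kernel-size arithmetic described above (plain arithmetic, not part of the commit):

for n in (1, 2):
    side = (n * 2) + 1
    print(n, side, side * side)   # 1 -> 3x3 (9 pixels), 2 -> 5x5 (25 pixels)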
@@ -1,22 +0,0 @@
import sensor, time
# Reset sensor
sensor.reset()

# Sensor settings
sensor.set_contrast(1)
sensor.set_gainceiling(16)
sensor.set_framesize(sensor.QCIF)
sensor.set_pixformat(sensor.GRAYSCALE)

# FPS clock
clock = time.clock()
while (True):
    clock.tick()
    # Capture snapshot
    img = sensor.snapshot()

    # Run median filter (r=3)
    img.median(size = 3)

    # Note: Actual FPS is higher, streaming the FB makes it slower.
    print(clock.fps())
27  usr/examples/04-Image-Filters/median_filter.py  Normal file
@@ -0,0 +1,27 @@
# Median Filter Example
#
# This example shows off median filtering. Median filtering replaces every pixel
# with the median value of its NxN neighborhood. Median filtering is good for
# removing noise in the image while preserving edges.

import sensor, image, time

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.
clock = time.clock() # Tracks FPS.

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    # The first argument to the median filter is the kernel size, it can be
    # either 0, 1, or 2 for a 1x1, 3x3, or 5x5 kernel respectively. The second
    # argument "percentile" is the percentile number to choose from the NxN
    # neighborhood. 0.5 is the median, 0.25 is the lower quartile, and 0.75
    # would be the upper quartile.
    img.median(1, percentile=0.5)

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
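A pure-Python illustration of the "percentile" argument described above, applied to one hypothetical 3x3 neighborhood of grayscale values (not part of the commit):

neighborhood = sorted([12, 200, 45, 45, 90, 17, 33, 60, 75])
for p in (0.25, 0.5, 0.75):
    print(p, neighborhood[int(p * (len(neighborhood) - 1))])   # 33, 45, 75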
27  usr/examples/04-Image-Filters/midpoint_filter.py  Normal file
@@ -0,0 +1,27 @@
# Midpoint Filter Example
#
# This example shows off midpoint filtering. Midpoint filtering replaces each
# pixel by the average of the min and max pixel values for an NxN neighborhood.

import sensor, image, time

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.
clock = time.clock() # Tracks FPS.

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    # The first argument is the kernel size. N corresponds to a ((N*2)+1)^2
    # kernel size. E.g. 1 == 3x3 kernel, 2 == 5x5 kernel, etc. Note: You
    # shouldn't ever need to use a value bigger than 2. The "bias" argument
    # lets you select between min and max blending. 0.5 == midpoint filter,
    # 0.0 == min filter, and 1.0 == max filter. Note that the min filter
    # makes images darker while the max filter makes images lighter.
    img.midpoint(1, bias=0.5)

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
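The "bias" blend described above, written out for one hypothetical min/max pair (plain arithmetic, not part of the commit):

lo, hi = 20, 180
for bias in (0.0, 0.5, 1.0):
    print(bias, int((hi * bias) + (lo * (1.0 - bias))))   # 20 (min), 100 (midpoint), 180 (max)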
25  usr/examples/04-Image-Filters/mode_filter.py  Normal file
@@ -0,0 +1,25 @@
# Mode Filter Example
#
# This example shows off mode filtering. Mode filtering is a highly non-linear
# operation which replaces each pixel with the mode of the NxN neighborhood
# of pixels around it. Avoid using the mode filter on RGB565 images. It will
# cause artifacts on image edges...

import sensor, image, time

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.
clock = time.clock() # Tracks FPS.

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    # The only argument to the mode filter is the kernel size, it can be
    # either 0, 1, or 2 for a 1x1, 3x3, or 5x5 kernel respectively.
    img.mode(1)

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
27  usr/examples/04-Image-Filters/sharpen_filter.py  Normal file
@@ -0,0 +1,27 @@
# Sharpen Filter Example:
#
# This example demonstrates using morph to sharpen images.

import sensor, image, time

kernel_size = 1 # kernel width = (size*2)+1, kernel height = (size*2)+1
kernel = [-1, -1, -1,\
          -1, +9, -1,\
          -1, -1, -1]
# This is a sharpen filter kernel.

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QQVGA) # or sensor.QVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.
clock = time.clock() # Tracks FPS.

while(True):
    clock.tick() # Track elapsed milliseconds between snapshots().
    img = sensor.snapshot() # Take a picture and return the image.

    # Run the kernel on every pixel of the image.
    img.morph(kernel_size, kernel)

    print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
    # connected to your computer. The FPS should increase once disconnected.
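One property of the sharpen kernel above worth noting (plain arithmetic, not part of the commit): its coefficients sum to +1, so flat regions keep their brightness while edges get boosted.

kernel = [-1, -1, -1,
          -1, +9, -1,
          -1, -1, -1]
print(sum(kernel))   # 1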
33  usr/examples/05-Snapshot/emboss_snapshot.py  Normal file
@@ -0,0 +1,33 @@
# Emboss Snapshot Example
#
# Note: You will need an SD card to run this example.
#
# You can use your OpenMV Cam to save modified image files.

import sensor, image, pyb

RED_LED_PIN = 1
BLUE_LED_PIN = 3

sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565) # or sensor.GRAYSCALE
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(10) # Let new settings take effect.

pyb.LED(RED_LED_PIN).on()
sensor.skip_frames(30) # Give the user time to get ready.

pyb.LED(RED_LED_PIN).off()
pyb.LED(BLUE_LED_PIN).on()

print("You're on camera!")
img = sensor.snapshot()

img.morph(1, [+2, +1, +0,\
              +1, +1, -1,\
              +0, -1, -2]) # Emboss the image.

img.save("example.jpg") # or "example.bmp" (or others)

pyb.LED(BLUE_LED_PIN).off()
print("Done! Reset the camera to see the saved image.")
@@ -35,9 +35,9 @@ while(True):
         img.difference("temp/bg.bmp")
         stats = img.statistics()
         # Stats 5 is the max of the lighting color channel. The below code
-        # triggers when the lighting max for the whole image goes above 10.
+        # triggers when the lighting max for the whole image goes above 20.
         # The lighting difference maximum should be zero normally.
-        if (stats[5] > 10):
+        if (stats[5] > 20):
             diff -= 1
 
     pyb.LED(BLUE_LED_PIN).off()
@@ -39,9 +39,9 @@ while(True):
         img.difference("temp/bg.bmp")
         stats = img.statistics()
         # Stats 5 is the max of the lighting color channel. The below code
-        # triggers when the lighting max for the whole image goes above 10.
+        # triggers when the lighting max for the whole image goes above 20.
         # The lighting difference maximum should be zero normally.
-        if (stats[5] > 10):
+        if (stats[5] > 20):
             diff -= 1
 
     g = gif.Gif("example-%d.gif" % pyb.rng(), loop=True)
@@ -40,9 +40,9 @@ while(True):
         img.difference("temp/bg.bmp")
         stats = img.statistics()
         # Stats 5 is the max of the lighting color channel. The below code
-        # triggers when the lighting max for the whole image goes above 10.
+        # triggers when the lighting max for the whole image goes above 20.
         # The lighting difference maximum should be zero normally.
-        if (stats[5] > 10):
+        if (stats[5] > 20):
             diff -= 1
 
     m = mjpeg.Mjpeg("example-%d.mjpeg" % pyb.rng())
@@ -1,4 +1,4 @@
-# Example 1 - LCD Shield Demo
+# LCD Example
 #
 # Note: To run this example you will need a LCD Shield for your OpenMV Cam.
 #
@@ -38,7 +38,7 @@ class BLE:
           ((r[4]-48)*100)+\
           ((r[5]-48)*10)+\
           ((r[6]-48)*1)
-        if not l: return ""
+        if not l: return None
        if l==1 or l==2: raise OSError("Response corrupted!")
        response=self.uart.read(l-2)
        if self.uart.readchar()!=13: raise OSError("Response corrupted!")