scripts/examples: Add Giga H7 examples.

This commit is contained in:
iabdalkader 2023-10-09 12:49:31 +02:00
parent a49d03a389
commit 67ad841ff7
34 changed files with 1257 additions and 0 deletions

View File

@ -0,0 +1,13 @@
# ADC Read Example.
#
# This example shows how to use the ADC to read an analog pin.
import time
from pyb import ADC
adc = ADC("A0")
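# Optional hedged helper (not part of the original example): average a few raw
# reads to smooth out noise before converting to volts, using only pyb.ADC.read()
# and the same 12-bit scale as the loop below.
def read_volts(adc, samples=8):
    raw = sum(adc.read() for _ in range(samples)) / samples
    return (raw * 3.3) / 4095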
while True:
# The ADC has 12-bits of resolution for 4096 values.
print("ADC = %fv" % ((adc.read() * 3.3) / 4095))
time.sleep_ms(100)

View File

@ -0,0 +1,11 @@
# ADC Internal Channels Example
#
# This example shows how to read internal ADC channels.
import pyb
adc = pyb.ADCAll(12)
print(
"VREF = %.1fv VBAT = %.1fv Temp = %d"
% (adc.read_core_vref(), adc.read_core_vbat(), adc.read_core_temp())
)

View File

@ -0,0 +1,37 @@
# CAN Example
#
# This example demonstrates CAN communications between two cameras.
# NOTE: you need two CAN transceiver shields and DB9 cable to run this example.
import time
import omv
from pyb import CAN
# NOTE: Set to False on receiving node.
TRANSMITTER = True
can = CAN(1, CAN.NORMAL, baudrate=125_000, sample_point=75)
# NOTE: uncomment to set bit timing manually, for example:
# can.init(CAN.NORMAL, prescaler=32, sjw=1, bs1=8, bs2=3)
can.restart()
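# Hedged debugging aid (not in the original example): pyb.CAN.state() and CAN.info()
# report controller/bus status, which helps spot a missing or mis-wired transceiver.
# print(can.state(), can.info())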
if TRANSMITTER:
while True:
# Send message with id 1
can.send("Hello", 1)
time.sleep_ms(1000)
else:
# Runs on the receiving node.
if omv.board_type() == "H7": # FDCAN
# Set a filter to receive messages with id=1 -> 4
# Filter index, mode (RANGE, DUAL or MASK), FIFO (0 or 1), params
can.setfilter(0, CAN.RANGE, 0, (1, 4))
else:
# Set a filter to receive messages with id=1, 2, 3 and 4
# Filter index, mode (LIST16, etc..), FIFO (0 or 1), params
can.setfilter(0, CAN.LIST16, 0, (1, 2, 3, 4))
while True:
# Receive messages on FIFO 0
print(can.recv(0, timeout=10000))

View File

@ -0,0 +1,32 @@
# CPU frequency scaling example.
#
# This example shows how to use the cpufreq module to change the CPU frequency on the fly.
import sensor
import image
import time
import cpufreq
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # Set pixel format to GRAYSCALE (or RGB565)
sensor.set_framesize(sensor.QVGA) # Set frame size to QVGA (320x240)
clock = time.clock() # Create a clock object to track the FPS.
def test_image_processing():
for i in range(0, 50):
clock.tick() # Update the FPS clock.
img = sensor.snapshot() # Take a picture and return the image.
img.find_edges(image.EDGE_CANNY, threshold=(50, 80))
print("\nFrequency Scaling Test...")
for f in cpufreq.get_supported_frequencies():
print("Testing CPU Freq: %dMHz..." % (f))
cpufreq.set_frequency(f)
clock.reset()
test_image_processing()
freqs = cpufreq.get_current_frequencies()
print(
"CPU Freq:%dMHz HCLK:%dMhz PCLK1:%dMhz PCLK2:%dMhz FPS:%.2f"
% (freqs[0], freqs[1], freqs[2], freqs[3], clock.fps())
)

View File

@ -0,0 +1,7 @@
# I2C Scanner Example
#
from machine import I2C
i2c = I2C(1, freq=400_000)
for addr in i2c.scan():
print("Found device at address %d:0x%x" % (bus, addr))

View File

@ -0,0 +1,30 @@
# LED Control Example
#
# This example shows how to control the RGB LED.
import time
from machine import LED
red_led = LED("LED_RED")
green_led = LED("LED_GREEN")
blue_led = LED("LED_BLUE")
def led_control(x):
if (x & 1) == 0:
red_led.off()
elif (x & 1) == 1:
red_led.on()
if (x & 2) == 0:
green_led.off()
elif (x & 2) == 2:
green_led.on()
if (x & 4) == 0:
blue_led.off()
elif (x & 4) == 4:
blue_led.on()
while True:
for i in range(16):
led_control(i)
time.sleep_ms(500)

View File

@ -0,0 +1,13 @@
# Pin Control Example
#
# This example shows how to use the I/O pins in GPIO mode.
from machine import Pin
# Connect a switch to pin 0 that will pull it low when the switch is closed.
# Pin 1 will then light up.
pin0 = Pin("D0", Pin.IN, Pin.PULL_UP)
pin1 = Pin("D1", Pin.OUT, Pin.PULL_NONE)
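# Hedged alternative to the polling loop below: mirror the switch with an interrupt
# instead. machine.Pin.irq() is standard MicroPython; keep the handler allocation-free.
# pin0.irq(lambda p: pin1.value(not p.value()), Pin.IRQ_RISING | Pin.IRQ_FALLING)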
while True:
pin1.value(not pin0.value())

View File

@ -0,0 +1,28 @@
# PWM Control Example
#
# This example shows how to use PWM.
import time
from pyb import Pin, Timer
class PWM:
def __init__(self, pin, tim, ch):
self.pin = pin
self.tim = tim
self.ch = ch
pwms = {
"PWM1": PWM("D7", 3, 1),
"PWM2": PWM("D8", 4, 3),
"PWM3": PWM("D9", 4, 4),
}
# Generate a 1 kHz square wave with a 50% duty cycle on the following PWM pins.
for k, pwm in pwms.items():
tim = Timer(pwm.tim, freq=1000) # Frequency in Hz
ch = tim.channel(pwm.ch, Timer.PWM, pin=Pin(pwm.pin), pulse_width_percent=50)
while True:
time.sleep_ms(1000)

View File

@ -0,0 +1,12 @@
# RTC Example
#
# This example shows how to use the RTC.
import time
from machine import RTC
rtc = RTC()
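# datetime() takes/returns (year, month, day, weekday, hours, minutes, seconds, subseconds).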
rtc.datetime((2013, 7, 9, 2, 0, 0, 0, 0))
while True:
print(rtc.datetime())
time.sleep_ms(1000)

View File

@ -0,0 +1,81 @@
# SPI Control
#
# This example shows how to use the SPI bus to control the
# 1.8" TFT LCD display (JD-T18003-T01) with ST7735R driver.
import sensor
import time
from machine import Pin
from machine import SPI
cs = Pin("D2", Pin.OPEN_DRAIN)
rst = Pin("D3", Pin.OUT)
rs = Pin("D4", Pin.OUT)
# NOTE: The SPI clock frequency will not always be the requested frequency. The hardware only supports
# frequencies that are the bus frequency divided by a prescaler (which can be 2, 4, 8, 16, 32, 64, 128 or 256).
spi = SPI(5, baudrate=int(1000000000 / 66), polarity=0, phase=0)
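# Hedged tip: print(spi) shows the configuration MicroPython actually selected after
# prescaler rounding (the exact repr format varies between ports).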
def write_command_byte(c):
cs.low()
rs.low()
spi.send(c)
cs.high()
def write_data_byte(c):
cs.low()
rs.high()
spi.send(c)
cs.high()
def write_command(c, *data):
write_command_byte(c)
if data:
for d in data:
write_data_byte(d)
def write_image(img):
cs.low()
rs.high()
spi.send(img)
cs.high()
# Reset the LCD.
rst.low()
time.sleep_ms(100)
rst.high()
time.sleep_ms(100)
write_command(0x11) # Sleep Exit
time.sleep_ms(120)
# Memory Data Access Control
# Write 0xC8 for BGR mode.
write_command(0x36, 0xC0)
# Interface Pixel Format
write_command(0x3A, 0x05)
# Display On
write_command(0x29)
sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QQVGA2)
sensor.skip_frames(time=2000) # Let new settings take effect.
clock = time.clock() # Tracks FPS.
while True:
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
write_command(0x2C) # Write image command...
write_image(img)
print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
# connected to your computer. The FPS should increase once disconnected.

View File

@ -0,0 +1,22 @@
# Timer Control Example
#
# This example shows how to use a timer for callbacks.
import time
from pyb import LED
from pyb import Timer
blue_led = LED(3)
# The callback receives the timer object when it is called.
# Note: functions that allocate memory are not allowed in callbacks.
def tick(timer):
blue_led.toggle()
tim = Timer(2, freq=1) # create a timer object using timer 2 - trigger at 1Hz
tim.callback(tick) # set the callback to our tick function
while True:
time.sleep_ms(1000)

View File

@ -0,0 +1,13 @@
# UART Control
#
# This example shows how to use the serial port on your OpenMV Cam.
import time
from machine import UART
# Init UART object.
uart = UART(4, 19200)
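# Hedged receive-side sketch: machine.UART also provides any() and read(), which
# could be used inside the loop below to echo incoming data, e.g.:
# if uart.any():
#     uart.write(uart.read())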
while True:
uart.write("Hello World!\r")
time.sleep_ms(1000)

View File

@ -0,0 +1,32 @@
# Making the OpenMV Cam act as a mouse using HID.
#
# First we need to create a boot.py file to change the default USB mode (VCP+MSC).
# Note: It is recommended to save this file to the uSD card, not the flash storage.
# This will make it easier to restore the default OpenMV (VCP+MSC) USB mode later
# by just deleting boot.py from the uSD card using the PC.
#
# Add the following script to boot.py:
#
# import pyb
# pyb.usb_mode('VCP+HID') # serial device + mouse (UNCOMMENT THIS LINE!)
# pyb.usb_mode('VCP+MSC') # serial device + storage device (default)
# pyb.usb_mode('VCP+HID', hid=pyb.hid_keyboard) # serial device + keyboard
#
# Copy boot.py to the root of the uSD card and restart the camera; it should now
# act as a serial device and a mouse.
#
# Connect to the camera using the IDE and run this script, you should see the mouse move.
#
# Note: To restore the default VCP+MSC USB mode, either use the PC to remove boot.py
# from the uSD card, or use the following Python line: import os; os.remove('boot.py')
import pyb
import time
hid = pyb.USB_HID()
while True:
# The report is (buttons, x, y, scroll).
# Move the cursor 10 pixels to the right.
hid.send((0, 10, 0, 0))
time.sleep_ms(500)

View File

@ -0,0 +1,38 @@
# USB VCP example.
# This example shows how to use the USB VCP class to send an image to PC on demand.
#
# WARNING:
# This script should NOT be run from the IDE or command line; it should be saved as main.py.
# Note the following commented script shows how to receive the image from the host side.
#
# #!/usr/bin/env python2.7
# import sys, serial, struct
# port = '/dev/ttyACM0'
# sp = serial.Serial(port, baudrate=115200, bytesize=serial.EIGHTBITS, parity=serial.PARITY_NONE,
# xonxoff=False, rtscts=False, stopbits=serial.STOPBITS_ONE, timeout=None, dsrdtr=True)
# sp.setDTR(True) # dsrdtr is ignored on Windows.
# sp.write("snap")
# sp.flush()
# size = struct.unpack('<L', sp.read(4))[0]
# img = sp.read(size)
# sp.close()
#
# with open("img.jpg", "w") as f:
# f.write(img)
import sensor
import ustruct
from pyb import USB_VCP
usb = USB_VCP()
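# Hedged note: if the binary stream may contain 0x03 (Ctrl-C), pyb.USB_VCP.setinterrupt()
# can disable the keyboard-interrupt character to avoid spurious KeyboardInterrupts.
# usb.setinterrupt(-1)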
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.RGB565) # Set pixel format to RGB565 (or GRAYSCALE)
sensor.set_framesize(sensor.QVGA) # Set frame size to QVGA (320x240)
sensor.skip_frames(time=2000) # Wait for settings to take effect.
while True:
cmd = usb.recv(4, timeout=5000)
if cmd == b"snap":
img = sensor.snapshot().compress()
usb.send(ustruct.pack("<L", img.size()))
usb.send(img)

View File

@ -0,0 +1,23 @@
# VSYNC GPIO output example.
#
# This example shows how to toggle a pin on VSYNC interrupt.
import sensor
import time
from machine import LED
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.RGB565) # Set pixel format to RGB565 (or GRAYSCALE)
sensor.set_framesize(sensor.QVGA) # Set frame size to QVGA (320x240)
# This pin will be toggled on/off on VSYNC rising and falling edges.
led = LED("LED_BLUE")
sensor.set_vsync_callback(lambda state, led=led: led.value(state))
clock = time.clock() # Create a clock object to track the FPS.
while True:
clock.tick() # Update the FPS clock.
img = sensor.snapshot() # Take a picture and return the image.
print(clock.fps()) # Note: OpenMV Cam runs about half as fast when connected
# to the IDE. The FPS should increase once disconnected.

View File

@ -0,0 +1,62 @@
# LCD, touch panel and camera example.
import sensor
import time
import display
from gt911 import GT911
from machine import I2C
IMG_OFFSET = 80
touch_detected = False
points_colors = ((255, 0, 0), (0, 255, 0), (0, 0, 255), (0, 255, 255), (255, 255, 0))
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.RGB565) # Set pixel format to RGB565 (or GRAYSCALE)
sensor.set_framesize(sensor.VGA) # Set frame size to VGA (640x480)
sensor.set_vflip(True) # Flip image for the display
# sensor.set_transpose(True) # Hardware transpose will be slower at this resolution
lcd = display.DSIDisplay(
framesize=display.FWVGA, portrait=True, refresh=60, controller=display.ST7701()
)
# Note: use pin numbers or names, not Pin objects, because the
# driver needs to change pin directions to reset the controller.
touch = GT911(
I2C(4, freq=400_000),
reset_pin="PI2",
irq_pin="PI1",
touch_points=5,
refresh_rate=240,
touch_callback=lambda pin: globals().update(touch_detected=True),
)
# Create a clock object to track the FPS.
clock = time.clock()
while True:
clock.tick() # Update the FPS clock.
# Capture a new frame
img = sensor.snapshot()
# Draw touch points if touch was detected.
if touch_detected:
n, points = touch.read_points()
for i in range(0, n):
img.draw_circle(
points[i][0] - IMG_OFFSET,
points[i][1],
points[i][2] * 3,
points_colors[points[i][3]],
thickness=2,
)
touch_detected = False
# Rotate the image in place.
img.replace(transpose=True)
# Draw the image on the display.
lcd.write(img, y=IMG_OFFSET)
print(clock.fps())

View File

@ -0,0 +1,70 @@
import image
import audio
from ulab import numpy as np
from ulab import utils
CHANNELS = 1
SIZE = 256 // (2 * CHANNELS)
raw_buf = None
fb = image.Image(SIZE + 50, SIZE, image.RGB565, copy_to_fb=True)
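# The extra 50 columns to the right of the FFT area hold the audio level bars,
# drawn below at x offsets SIZE + 0 and SIZE + 25.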
audio.init(channels=CHANNELS, frequency=16000, gain_db=24, highpass=0.9883)
def audio_callback(buf):
# NOTE: do not call any function that allocates memory.
global raw_buf
if raw_buf is None:
raw_buf = buf
# Start audio streaming
audio.start_streaming(audio_callback)
def draw_fft(img, fft_buf):
fft_buf = (fft_buf / max(fft_buf)) * SIZE
fft_buf = np.log10(fft_buf + 1) * 20
color = (0xFF, 0x0F, 0x00)
for i in range(0, SIZE):
img.draw_line(i, SIZE, i, SIZE - int(fft_buf[i]), color, 1)
def draw_audio_bar(img, level, offset):
blk_size = SIZE // 10
color = (0xFF, 0x00, 0xF0)
blk_space = blk_size // 4
for i in range(0, int(round(level / 10))):
img.draw_rectangle(
SIZE + offset,
SIZE - ((i + 1) * blk_size) + blk_space,
20,
blk_size - blk_space,
color,
1,
True,
)
while True:
if raw_buf is not None:
pcm_buf = np.frombuffer(raw_buf, dtype=np.int16)
raw_buf = None
if CHANNELS == 1:
fft_buf = utils.spectrogram(pcm_buf)
l_lvl = int((np.mean(abs(pcm_buf)) / 32768) * 100)
else:
fft_buf = utils.spectrogram(pcm_buf[0::2])
l_lvl = int((np.mean(abs(pcm_buf[1::2])) / 32768) * 100)
r_lvl = int((np.mean(abs(pcm_buf[0::2])) / 32768) * 100)
fb.clear()
draw_fft(fb, fft_buf)
draw_audio_bar(fb, l_lvl, 0)
if CHANNELS == 2:
draw_audio_bar(fb, r_lvl, 25)
fb.flush()
# Stop streaming
audio.stop_streaming()

View File

@ -0,0 +1,36 @@
# MicroSpeech demo.
#
# Download the pre-trained Yes/No model from here:
# https://raw.githubusercontent.com/iabdalkader/microspeech-yesno-model/main/model.tflite
# Save the model to storage, reset and run the example.
import audio
import time
import tf
import micro_speech
import pyb
labels = ["Silence", "Unknown", "Yes", "No"]
led_red = pyb.LED(1)
led_green = pyb.LED(2)
model = tf.load("/model.tflite")
speech = micro_speech.MicroSpeech()
audio.init(channels=1, frequency=16000, gain_db=24, highpass=0.9883)
# Start audio streaming
audio.start_streaming(speech.audio_callback)
while True:
# Run micro-speech without a timeout and filter detections by label index.
idx = speech.listen(model, timeout=0, threshold=0.70, filter=[2, 3])
led = led_green if idx == 2 else led_red
print(labels[idx])
for i in range(0, 4):
led.on()
time.sleep_ms(25)
led.off()
time.sleep_ms(25)
# Stop streaming
audio.stop_streaming()

View File

@ -0,0 +1,21 @@
# Connect Example
#
# This example shows how to connect to a WiFi network.
import network
import time
SSID = "" # Network SSID
KEY = "" # Network key
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())

View File

@ -0,0 +1,22 @@
# DNS Example
#
# This example shows how to get the IP address for websites via DNS.
import network
import socket
import time
SSID = "" # Network SSID
KEY = "" # Network key
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())
print(socket.getaddrinfo("www.google.com", 80)[0][4])

View File

@ -0,0 +1,42 @@
# Simple HTTP client example.
import network
import socket
import time
# AP info
SSID = "" # Network SSID
KEY = "" # Network key
PORT = 80
HOST = "www.google.com"
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())
# Get addr info via DNS
addr = socket.getaddrinfo(HOST, PORT)[0][4]
print(addr)
# Create a new socket and connect to addr
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(addr)
# Set timeout
client.settimeout(3.0)
# Send HTTP request and recv response
client.send("GET / HTTP/1.1\r\nHost: %s\r\n\r\n" % (HOST))
print(client.recv(1024))
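# Hedged note: recv(1024) returns at most the first 1 KB of the response; to read it
# all, keep calling recv() until it returns b"" or the socket times out, e.g.:
# while True:
#     data = client.recv(1024)
#     if not data:
#         break
#     print(data)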
# Close socket
client.close()

View File

@ -0,0 +1,53 @@
# Simple HTTPS client example.
import network
import socket
import ssl
import time
# AP info
SSID = "" # Network SSID
KEY = "" # Network key
PORT = 443
HOST = "www.google.com"
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())
# Get addr info via DNS
addr = socket.getaddrinfo(HOST, PORT)[0][4]
print(addr)
# Create a new socket and connect to addr
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(addr)
# Set timeout
client.settimeout(3.0)
client = ssl.wrap_socket(client, server_hostname=HOST)
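# Hedged note: without a CA certificate / cert_reqs setting, wrap_socket() on
# MicroPython does not verify the server certificate; this provides encryption only.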
# Send HTTP request and recv response
request = "GET / HTTP/1.1\r\n"
request += "HOST: %s\r\n"
request += "User-Agent: Mozilla/5.0\r\n"
request += "Connection: keep-alive\r\n\r\n"
# Add more headers if needed.
client.write(request % (HOST))
response = client.read(1024)
for l in response.split(b"\r\n"):
print(l.decode())
# Close socket
client.close()

View File

@ -0,0 +1,89 @@
# MJPEG Streaming
#
# This example shows how to do MJPEG streaming to a web browser.
# Chrome, Firefox and MJpegViewer App on Android have been tested.
# Connect to the IP address/port printed out from ifconfig to view the stream.
import sensor
import time
import network
import socket
SSID = "" # Network SSID
KEY = "" # Network key
HOST = "" # Use first available interface
PORT = 8080 # Arbitrary non-privileged port
# Init sensor
sensor.reset()
sensor.set_framesize(sensor.QVGA)
sensor.set_pixformat(sensor.RGB565)
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())
# Create server socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
# Bind and listen
s.bind((HOST, PORT))
s.listen(5)
# Set server socket to blocking
s.setblocking(True)
def start_streaming(s):
print("Waiting for connections..")
client, addr = s.accept()
# set client socket timeout to 5s
client.settimeout(5.0)
print("Connected to " + addr[0] + ":" + str(addr[1]))
# Read request from client
data = client.recv(1024)
# Should parse client request here
# Send multipart header
client.sendall(
"HTTP/1.1 200 OK\r\n"
"Server: OpenMV\r\n"
"Content-Type: multipart/x-mixed-replace;boundary=openmv\r\n"
"Cache-Control: no-cache\r\n"
"Pragma: no-cache\r\n\r\n"
)
# FPS clock
clock = time.clock()
# Start streaming images
# NOTE: Disable IDE preview to increase streaming FPS.
while True:
clock.tick() # Track elapsed milliseconds between snapshots().
frame = sensor.snapshot()
cframe = frame.compressed(quality=35)
header = (
"\r\n--openmv\r\n"
"Content-Type: image/jpeg\r\n"
"Content-Length:" + str(cframe.size()) + "\r\n\r\n"
)
client.sendall(header)
client.sendall(cframe)
print(clock.fps())
while True:
try:
start_streaming(s)
except OSError as e:
print("socket error: ", e)
# sys.print_exception(e)

View File

@ -0,0 +1,34 @@
# MQTT Example.
# This example shows how to use the MQTT library to publish to a topic.
#
# 1) Copy the mqtt.py library to OpenMV storage.
# 2) Run this script on the OpenMV camera.
# 3) Install the mosquitto client on PC and run the following command:
# mosquitto_sub -h test.mosquitto.org -t "openmv/test" -v
#
# NOTE: If the mosquitto broker is unreachable, try another broker (For example: broker.hivemq.com)
import time
import network
from mqtt import MQTTClient
SSID = "" # Network SSID
KEY = "" # Network key
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())
client = MQTTClient("openmv", "test.mosquitto.org", port=1883)
client.connect()
while True:
client.publish("openmv/test", "Hello World!")
time.sleep_ms(1000)

View File

@ -0,0 +1,43 @@
# MQTT Example.
# This example shows how to use the MQTT library to subscribe to a topic.
#
# 1) Copy the mqtt.py library to OpenMV storage.
# 2) Run this script on the OpenMV camera.
# 3) Install the mosquitto client on PC and run the following command:
# mosquitto_pub -t "openmv/test" -m "Hello World!" -h test.mosquitto.org -p 1883
#
# NOTE: If the mosquitto broker is unreachable, try another broker (For example: broker.hivemq.com)
import time
import network
from mqtt import MQTTClient
SSID = "" # Network SSID
KEY = "" # Network key
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())
client = MQTTClient("openmv", "test.mosquitto.org", port=1883)
client.connect()
def callback(topic, msg):
print(topic, msg)
# must set callback first
client.set_callback(callback)
client.subscribe("openmv/test")
while True:
client.check_msg() # poll for messages.
time.sleep_ms(1000)

View File

@ -0,0 +1,40 @@
# NTP Example
#
# This example shows how to get the current time using NTP over WiFi.
import network
import socket
import struct
import time
SSID = "" # Network SSID
KEY = "" # Network key
TIMESTAMP = 2208988800 + 946684800
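# 2208988800 converts the NTP epoch (1900) to the Unix epoch (1970); 946684800 then
# shifts to the 2000 epoch used by MicroPython's time module on embedded ports.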
# Init wlan module and connect to network
print("Trying to connect... (This may take a while)...")
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# We should have a valid IP now via DHCP
print("WiFi Connected ", wlan.ifconfig())
# Create new socket
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Get addr info via DNS
addr = socket.getaddrinfo("pool.ntp.org", 123)[0][4]
# Send query
client.sendto("\x1b" + 47 * "\0", addr)
data, address = client.recvfrom(1024)
# Print time
t = struct.unpack(">IIIIIIIIIIII", data)[10] - TIMESTAMP
print("Year:%d Month:%d Day:%d Time: %d:%d:%d" % (time.localtime(t)[0:6]))

View File

@ -0,0 +1,20 @@
# Scan Example
#
# This example shows how to scan for WiFi networks.
import time
import network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
print("Scanning...")
while True:
scan_result = wlan.scan()
for ap in scan_result:
print(
"SSID: %s BSSID: %s Channel: %d RSSI: %d Auth: %d"
% (ap[0], ":".join(["%X" % i for i in ap[1]]), ap[2], ap[3], ap[4])
)
print()
time.sleep_ms(1000)

View File

@ -0,0 +1,39 @@
# NTP Example using static IP.
#
# This example shows how to get the current time using NTP over WiFi with a static IP configuration.
import network
import socket
import struct
import time
SSID = "" # Network SSID
KEY = "" # Network key
TIMESTAMP = 2208988800 + 946684800
# Init wlan module and connect to network
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
# ifconfig must be called before connect()
wlan.ifconfig(("192.168.1.200", "255.255.255.0", "192.168.1.1", "192.168.1.1"))
wlan.connect(SSID, KEY)
while not wlan.isconnected():
print('Trying to connect to "{:s}"...'.format(SSID))
time.sleep_ms(1000)
# Create new socket
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Get addr info via DNS
addr = socket.getaddrinfo("pool.ntp.org", 123)[0][4]
# Send query
client.sendto("\x1b" + 47 * "\0", addr)
data, address = client.recvfrom(1024)
# Print time
t = struct.unpack(">IIIIIIIIIIII", data)[10] - TIMESTAMP
print("Year:%d Month:%d Day:%d Time: %d:%d:%d" % (time.localtime(t)[0:6]))

View File

@ -0,0 +1,61 @@
# Bluetooth Blinky Example
#
# Use the nRF Connect app from the app store, connect to the Giga and write 1/0 to control the LED.
import bluetooth
import time
from ble_advertising import advertising_payload
from machine import LED
from micropython import const
_IRQ_CENTRAL_CONNECT = const(1)
_IRQ_CENTRAL_DISCONNECT = const(2)
_IRQ_GATTS_WRITE = const(3)
_FLAG_READ = const(0x0002)
_FLAG_WRITE = const(0x0008)
_FLAG_NOTIFY = const(0x0010)
_FLAG_INDICATE = const(0x0020)
_SERVICE_UUID = bluetooth.UUID(0x1523)
_LED_CHAR_UUID = (bluetooth.UUID(0x1525), _FLAG_WRITE)
_LED_SERVICE = (
_SERVICE_UUID,
(_LED_CHAR_UUID,),
)
class BLETemperature:
def __init__(self, ble, name="Giga-H7"):
self._ble = ble
self._ble.active(True)
self._ble.irq(self._irq)
((self._handle,),) = self._ble.gatts_register_services((_LED_SERVICE,))
self._connections = set()
self._payload = advertising_payload(name=name, services=[_SERVICE_UUID])
self._advertise()
self.led = LED("LED_BLUE")
def _irq(self, event, data):
# Track connections so we can send notifications.
if event == _IRQ_CENTRAL_CONNECT:
conn_handle, _, _ = data
self._connections.add(conn_handle)
elif event == _IRQ_CENTRAL_DISCONNECT:
conn_handle, _, _ = data
self._connections.remove(conn_handle)
# Start advertising again to allow a new connection.
self._advertise()
elif event == _IRQ_GATTS_WRITE:
self.led.value(self._ble.gatts_read(data[-1])[0])
def _advertise(self, interval_us=500000):
self._ble.gap_advertise(interval_us, adv_data=self._payload)
if __name__ == "__main__":
ble = bluetooth.BLE()
temp = BLETemperature(ble)
while True:
time.sleep_ms(1000)

View File

@ -0,0 +1,98 @@
# This example demonstrates a simple temperature sensor peripheral.
#
# The sensor's local value updates every second, and it will notify
# any connected central every 10 seconds.
import bluetooth
import random
import struct
import time
from ble_advertising import advertising_payload
from machine import LED
from micropython import const
_IRQ_CENTRAL_CONNECT = const(1)
_IRQ_CENTRAL_DISCONNECT = const(2)
_IRQ_GATTS_INDICATE_DONE = const(20)
_FLAG_READ = const(0x0002)
_FLAG_NOTIFY = const(0x0010)
_FLAG_INDICATE = const(0x0020)
# org.bluetooth.service.environmental_sensing
_ENV_SENSE_UUID = bluetooth.UUID(0x181A)
# org.bluetooth.characteristic.temperature
_TEMP_CHAR = (
bluetooth.UUID(0x2A6E),
_FLAG_READ | _FLAG_NOTIFY | _FLAG_INDICATE,
)
_ENV_SENSE_SERVICE = (
_ENV_SENSE_UUID,
(_TEMP_CHAR,),
)
# org.bluetooth.characteristic.gap.appearance.xml
_ADV_APPEARANCE_GENERIC_THERMOMETER = const(768)
class BLETemperature:
def __init__(self, ble, name="Giga-H7"):
self._ble = ble
self._ble.active(True)
self._ble.irq(self._irq)
((self._handle,),) = self._ble.gatts_register_services((_ENV_SENSE_SERVICE,))
self._connections = set()
self._payload = advertising_payload(
name=name,
services=[_ENV_SENSE_UUID],
appearance=_ADV_APPEARANCE_GENERIC_THERMOMETER,
)
self._advertise()
self.led = LED("LED_BLUE")
def _irq(self, event, data):
# Track connections so we can send notifications.
if event == _IRQ_CENTRAL_CONNECT:
conn_handle, _, _ = data
self._connections.add(conn_handle)
self.led.on()
elif event == _IRQ_CENTRAL_DISCONNECT:
conn_handle, _, _ = data
self._connections.remove(conn_handle)
# Start advertising again to allow a new connection.
self._advertise()
self.led.off()
elif event == _IRQ_GATTS_INDICATE_DONE:
conn_handle, value_handle, status = data
def set_temperature(self, temp_deg_c, notify=False, indicate=False):
# Data is sint16 in degrees Celsius with a resolution of 0.01 degrees Celsius.
# Write the local value, ready for a central to read.
self._ble.gatts_write(self._handle, struct.pack("<h", int(temp_deg_c * 100)))
if notify or indicate:
for conn_handle in self._connections:
if notify:
# Notify connected centrals.
self._ble.gatts_notify(conn_handle, self._handle)
if indicate:
# Indicate connected centrals.
self._ble.gatts_indicate(conn_handle, self._handle)
def _advertise(self, interval_us=500000):
self._ble.gap_advertise(interval_us, adv_data=self._payload)
if __name__ == "__main__":
ble = bluetooth.BLE()
temp = BLETemperature(ble)
t = 25
i = 0
while True:
# Write every second, notify every 10 seconds.
i = (i + 1) % 10
temp.set_temperature(t, notify=i == 0, indicate=False)
# Random walk the temperature.
t += random.uniform(-0.5, 0.5)
time.sleep_ms(1000)

View File

@ -0,0 +1,63 @@
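# BLE temperature peripheral example using aioble/asyncio. It advertises the
# Environmental Sensing service and updates a simulated temperature every second.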
from micropython import const
import uasyncio as asyncio
import aioble
import bluetooth
import random
import struct
# org.bluetooth.service.environmental_sensing
_ENV_SENSE_UUID = bluetooth.UUID(0x181A)
# org.bluetooth.characteristic.temperature
_ENV_SENSE_TEMP_UUID = bluetooth.UUID(0x2A6E)
# org.bluetooth.characteristic.gap.appearance.xml
_ADV_APPEARANCE_GENERIC_THERMOMETER = const(768)
# How frequently to send advertising beacons.
_ADV_INTERVAL_US = 250_000
# Register GATT server.
temp_service = aioble.Service(_ENV_SENSE_UUID)
temp_characteristic = aioble.Characteristic(
temp_service, _ENV_SENSE_TEMP_UUID, read=True, notify=True
)
aioble.register_services(temp_service)
# Helper to encode the temperature characteristic (sint16, hundredths of a degree Celsius).
def _encode_temperature(temp_deg_c):
return struct.pack("<h", int(temp_deg_c * 100))
# This would be periodically polling a hardware sensor.
async def sensor_task():
t = 24.5
while True:
temp_characteristic.write(_encode_temperature(t))
t += random.uniform(-0.5, 0.5)
await asyncio.sleep_ms(1000)
# Serially wait for connections. Don't advertise while a central is
# connected.
async def peripheral_task():
while True:
async with await aioble.advertise(
_ADV_INTERVAL_US,
name="Giga-H7",
services=[_ENV_SENSE_UUID],
appearance=_ADV_APPEARANCE_GENERIC_THERMOMETER,
) as connection:
print("Connection from", connection.device)
await connection.disconnected()
# Run both tasks.
async def main():
t1 = asyncio.create_task(sensor_task())
t2 = asyncio.create_task(peripheral_task())
await asyncio.gather(t1, t2)
asyncio.run(main())

View File

@ -0,0 +1,27 @@
# Deep Sleep Mode Example
# This example demonstrates the low-power deep sleep mode plus sensor shutdown.
# Note the camera will reset after wake-up from deep sleep. To find out if the cause of reset
# is deep sleep, call the machine.reset_cause() function and test for machine.DEEPSLEEP_RESET
import machine
import sensor
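# Example of the reset-cause check described above; machine.reset_cause() and
# machine.DEEPSLEEP_RESET are standard MicroPython names.
if machine.reset_cause() == machine.DEEPSLEEP_RESET:
    print("Woke up from deep sleep")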
# Create and init RTC object.
rtc = machine.RTC()
# (year, month, day, weekday, hours, minutes, seconds, subseconds)
rtc.datetime((2014, 5, 1, 4, 13, 0, 0, 0))
# Print RTC info.
print(rtc.datetime())
sensor.reset()
# Shutdown the sensor (pulls PWDN high).
sensor.shutdown(True)
# Enable RTC interrupts every 30 seconds.
# Note the camera will RESET after wakeup from Deepsleep Mode.
rtc.wakeup(30000)
# Enter Deepsleep Mode.
machine.deepsleep()

View File

@ -0,0 +1,26 @@
# ExtInt Wake-Up from Stop Mode Example
# This example demonstrates using external interrupts to wake up from low-power mode.
import time
import machine
from machine import LED
from machine import Pin
from pyb import ExtInt
def callback(line):
pass
led = LED("LED_BLUE")
pin = Pin("D0", Pin.IN, Pin.PULL_UP)
ext = ExtInt(pin, ExtInt.IRQ_FALLING, Pin.PULL_UP, callback)
# Enter Stop Mode. Note the IDE will disconnect.
machine.sleep()
while True:
led.on()
time.sleep_ms(100)
led.off()
time.sleep_ms(100)

View File

@ -0,0 +1,19 @@
# Stop Mode Example
# This example demonstrates using the low-power Stop Mode.
import machine
# Create and init RTC object.
rtc = machine.RTC()
# (year, month, day, weekday, hours, minutes, seconds, subseconds)
rtc.datetime((2014, 5, 1, 4, 13, 0, 0, 0))
# Print RTC info.
print(rtc.datetime())
# Enable RTC interrupts every 5 seconds.
rtc.wakeup(5000)
# Enter Stop Mode.
# Note the IDE will disconnect.
machine.sleep()