Mirror of https://github.com/openmv/openmv.git (synced 2025-11-04 14:49:50 +08:00)
Merge pull request #2874 from kwagyeman/kwabena/add_yolo_v2_v5_examples
Some checks failed
🔥 Firmware Build / build-firmware (false, 0, false, DOCKER) (push) Has been cancelled
🔥 Firmware Build / build-firmware (false, 0, true, MPS2_AN500) (push) Has been cancelled
🔥 Firmware Build / build-firmware (false, 0, true, MPS3_AN547) (push) Has been cancelled
🔥 Firmware Build / build-firmware (false, 1, false, OPENMV_N6) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, ARDUINO_GIGA) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, ARDUINO_NANO_33_BLE_SENSE) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, ARDUINO_NANO_RP2040_CONNECT) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, ARDUINO_NICLA_VISION) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, ARDUINO_PORTENTA_H7) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMV2) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMV3) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMV4) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMV4P) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMVPT) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMV_AE3) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMV_N6) (push) Has been cancelled
🔥 Firmware Build / build-firmware (true, 0, false, OPENMV_RT1060) (push) Has been cancelled
🔥 Firmware Build / code-size-report (push) Has been cancelled
🔥 Firmware Build / stable-release (push) Has been cancelled
🔥 Firmware Build / development-release (push) Has been cancelled
scripts/examples: Add yolov2 and yolov5 template examples.
commit d038dc7ab8
@@ -14,12 +14,6 @@
         "alignment": 16,
         "optimize": "Performance"
     },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v5_224_nano.tflite",
-        "alignment": 16,
-        "optimize": "Performance"
-    },
     {
         "type": "tflite",
         "path": "{TOP}/lib/models/force_int_quant.tflite",
@@ -14,12 +14,6 @@
         "alignment": 16,
         "optimize": "Performance"
     },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v5_224_nano.tflite",
-        "alignment": 16,
-        "optimize": "Performance"
-    },
     {
         "type": "tflite",
         "path": "{TOP}/lib/models/force_int_quant.tflite",
@@ -14,12 +14,6 @@
         "alignment": 16,
         "optimize": "Performance"
     },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v5_224_nano.tflite",
-        "alignment": 16,
-        "optimize": "Performance"
-    },
     {
         "type": "tflite",
         "path": "{TOP}/lib/models/force_int_quant.tflite",
@@ -14,12 +14,6 @@
         "alignment": 16,
         "optimize": "Performance"
     },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v5_224_nano.tflite",
-        "alignment": 16,
-        "optimize": "Performance"
-    },
     {
         "type": "tflite",
         "path": "{TOP}/lib/models/force_int_quant.tflite",
@@ -26,12 +26,6 @@
         "alignment": 16,
         "optimize": "Performance"
     },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v2_224_small.tflite",
-        "alignment": 16,
-        "optimize": "Performance"
-    },
     {
         "type": "tflite",
         "path": "{TOP}/lib/models/force_int_quant.tflite",
@@ -14,18 +14,6 @@
         "alignment": 32,
         "profile": "default"
     },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v2_224_small.tflite",
-        "alignment": 32,
-        "profile": "default"
-    },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v5_224_nano.tflite",
-        "alignment": 32,
-        "profile": "default"
-    },
     {
         "type": "tflite",
         "path": "{TOP}/lib/models/blazeface_front_128.tflite",
@@ -14,12 +14,6 @@
         "alignment": 16,
         "optimize": "Performance"
     },
-    {
-        "type": "tflite",
-        "path": "{TOP}/lib/models/yolo_v5_224_nano.tflite",
-        "alignment": 16,
-        "optimize": "Performance"
-    },
     {
         "type": "tflite",
         "path": "{TOP}/lib/models/force_int_quant.tflite",
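Across the boards above the change is the same: these hunks appear to be per-board model manifests, each dropping its YOLO entries so the .tflite files are no longer bundled in the firmware image. For reference, the snippet below is a minimal sketch of a single model entry, reconstructed only from the keys visible in the hunks ("type", "path", "alignment", and "optimize" or "profile", depending on the board). The enclosing list structure and the placeholder model name are assumptions, not the exact manifest schema.

[
    {
        "type": "tflite",
        "path": "{TOP}/lib/models/<some_model>.tflite",
        "alignment": 16,
        "optimize": "Performance"
    }
]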
Binary file not shown.
@@ -1,2 +0,0 @@
-background
-person
Binary file not shown.
@@ -1,2 +0,0 @@
-background
-person
@@ -0,0 +1,50 @@
# This work is licensed under the MIT license.
# Copyright (c) 2013-2025 OpenMV LLC. All rights reserved.
# https://github.com/openmv/openmv/blob/master/LICENSE
#
# TensorFlow Lite YOLO V2 Example
#
# This example runs a YOLO V2 object detection model.
# Please see OpenMV IDE's model zoo for example YOLO V2 models.
#
# For more information on YOLO V2, please see:
# https://github.com/STMicroelectronics/stm32ai-modelzoo/tree/main/object_detection/tiny_yolo_v2
#
# NOTE: This example requires an OpenMV Cam with an NPU, like the AE3 or N6, to run in real time.

import csi
import time
import ml
from ml.postprocessing.darknet import YoloV2

# Initialize the sensor.
csi0 = csi.CSI()
csi0.reset()
csi0.pixformat(csi.RGB565)
csi0.framesize(csi.VGA)
csi0.window((400, 400))

# Load the YOLO V2 model from the ROM FS.
model = ml.Model("/rom/<model_file_name>", postprocess=YoloV2(threshold=0.4))
print(model)

# Visualization parameters.
n = len(model.labels)
model_class_colors = [(int(255 * i // n), int(255 * (n - i - 1) // n), 255) for i in range(n)]

clock = time.clock()
while True:
    clock.tick()
    img = csi0.snapshot()

    # boxes is a list of lists (one per class) of ((x, y, w, h), score) tuples.
    boxes = model.predict([img])

    # Draw bounding boxes around the detected objects.
    for i, class_detections in enumerate(boxes):
        rects = [r for r, score in class_detections]
        labels = [model.labels[i] for j in range(len(rects))]
        colors = [model_class_colors[i] for j in range(len(rects))]
        ml.utils.draw_predictions(img, rects, labels, colors, format=None)

    print(clock.fps(), "fps")
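Since the nested `boxes` structure returned by `model.predict([img])` can be easy to misread, here is a small hedged sketch of how it could be flattened for logging over the serial terminal instead of drawing. It is not part of the example above; `log_detections` is a hypothetical helper name, and it relies only on the structure documented in the script's own comment.

def log_detections(model, boxes):
    # boxes: one list per class of ((x, y, w, h), score) tuples,
    # exactly as produced by model.predict([img]) in the example above.
    detections = []
    for class_index, class_detections in enumerate(boxes):
        for rect, score in class_detections:
            detections.append((model.labels[class_index], rect, score))
    # Print the highest-confidence detections first.
    detections.sort(key=lambda d: d[2], reverse=True)
    for label, (x, y, w, h), score in detections:
        print("%s at x=%d y=%d w=%d h=%d score=%.2f" % (label, x, y, w, h, score))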
@@ -0,0 +1,50 @@
# This work is licensed under the MIT license.
# Copyright (c) 2013-2025 OpenMV LLC. All rights reserved.
# https://github.com/openmv/openmv/blob/master/LICENSE
#
# TensorFlow Lite YOLO V5 Example
#
# This example runs a YOLO V5 object detection model.
# Please see OpenMV IDE's model zoo for example YOLO V5 models.
#
# You can train your own custom YOLO V5 models using Edge Impulse:
# https://github.com/edgeimpulse/ml-block-yolov5
#
# NOTE: This example requires an OpenMV Cam with an NPU, like the AE3 or N6, to run in real time.

import csi
import time
import ml
from ml.postprocessing.ultralytics import YoloV5

# Initialize the sensor.
csi0 = csi.CSI()
csi0.reset()
csi0.pixformat(csi.RGB565)
csi0.framesize(csi.VGA)
csi0.window((400, 400))

# Load the YOLO V5 model from the ROM FS.
model = ml.Model("/rom/<model_file_name>", postprocess=YoloV5(threshold=0.4))
print(model)

# Visualization parameters.
n = len(model.labels)
model_class_colors = [(int(255 * i // n), int(255 * (n - i - 1) // n), 255) for i in range(n)]

clock = time.clock()
while True:
    clock.tick()
    img = csi0.snapshot()

    # boxes is a list of lists (one per class) of ((x, y, w, h), score) tuples.
    boxes = model.predict([img])

    # Draw bounding boxes around the detected objects.
    for i, class_detections in enumerate(boxes):
        rects = [r for r, score in class_detections]
        labels = [model.labels[i] for j in range(len(rects))]
        colors = [model_class_colors[i] for j in range(len(rects))]
        ml.utils.draw_predictions(img, rects, labels, colors, format=None)

    print(clock.fps(), "fps")
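As a variation on the `ml.utils.draw_predictions()` call in the loop above, each detection could also be drawn by hand with per-box confidence text. The sketch below is meant as a replacement body for the `for i, class_detections in enumerate(boxes):` loop; `draw_rectangle()` and `draw_string()` are standard OpenMV image methods, but the exact styling arguments here are assumptions and may need tuning.

# Replacement body for the per-class loop in the example above.
for (x, y, w, h), score in class_detections:
    color = model_class_colors[i]
    img.draw_rectangle(x, y, w, h, color=color)
    # Put the class name and confidence just above the box (clamped to the image).
    img.draw_string(x, max(y - 10, 0), "%s %.2f" % (model.labels[i], score), color=color)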
@@ -47,7 +47,6 @@ class YoloV2:
     def __init__(self, threshold=0.6, anchors=None, nms_threshold=0.1, nms_sigma=0.1):
         self.threshold = threshold
         self.anchors = anchors
-        self.anchors_len = len(self.anchors)
         self.nms_threshold = nms_threshold
         self.nms_sigma = nms_sigma
 
@@ -58,6 +57,8 @@ class YoloV2:
                                      [5.55170, 9.30660],
                                      [9.72600, 11.1422]])
+
+        self.anchors_len = len(self.anchors)
 
     def __call__(self, model, inputs, outputs):
 
         def softmax(x):
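The two hunks above move `self.anchors_len = len(self.anchors)` out of the top of `__init__` and re-add it after the default anchors array has been filled in. The toy class below is a minimal sketch of why that ordering matters when `anchors=None` is accepted as a default; the names and values are illustrative only and do not reproduce the actual OpenMV implementation.

class AnchorsDemo:
    # Illustrative only: a stand-in for YoloV2's default anchor table.
    DEFAULT_ANCHORS = [[0.98, 3.36], [2.11, 5.37], [5.55, 9.30], [9.72, 11.14]]

    def __init__(self, anchors=None):
        self.anchors = anchors
        # Calling len(self.anchors) at this point would raise
        # "TypeError: object of type 'NoneType' has no len()" whenever the
        # caller relies on the default, which is what the removed line risked.
        if self.anchors is None:
            self.anchors = self.DEFAULT_ANCHORS
        # Safe now: the default has been substituted before measuring it.
        self.anchors_len = len(self.anchors)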