Mirror of https://github.com/openmv/openmv.git (synced 2025-11-04 14:49:50 +08:00)
tools: Update converters.
For use with romfs script.
This commit is contained in:
parent be021e0312
commit d8cec81ef3
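For reference, a rough sketch of how a romfs build script might drive the two updated converters. The module names, file paths, and argument values below are illustrative assumptions; only the function names and the --optimise option appear in this commit:

import os

# Assumed module names for the two converter scripts changed in this commit.
from cascade_converter import cascade_binary_universal
from vela_converter import vela_compile

BUILD_DIR = "build/romfs"  # assumed staging directory for the romfs image
os.makedirs(BUILD_DIR, exist_ok=True)

# Convert an OpenCV Haar cascade XML (old or new format is detected automatically)
# into the binary form consumed by the firmware.
cascade_binary_universal("haarcascade_frontalface_default.xml", 25, "frontalface")

# Compile a TFLite model with Vela; the helper renames <model>_vela.tflite
# to <model>.tflite inside the build directory.
vela_compile("mobilenet.tflite", BUILD_DIR, ["--optimise", "Performance"])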
@@ -8,19 +8,35 @@
#
# Haar Cascade binary converter.

import sys,os
import sys
import os
import struct
import argparse
from xml.dom import minidom

def print_cascade_info(path, size, stages, n_features, n_rectangles, c_format):
    C_GREEN = '\033[92m'
    C_RED = '\033[91m'
    C_BLUE = '\033[94m'
    C_RESET = '\033[0m'

    c_format = "New" if c_format else "Old"
    c_name = os.path.basename(os.path.splitext(path)[0])
    c_size = f'{size[0]}x{size[1]}'
    print(C_BLUE + f"{'Cascade:':<30} " + f"{c_name:<50}")
    print(C_BLUE + f"{'Format:':<30} " + f"{c_format:<50}")
    print(C_BLUE + f"{'Size:':<30} " + C_RED + f"{c_size:<50}")
    print(C_BLUE + f"{'Number of Stages:':<30} " + C_RED + f"{len(stages):<50}")
    print(C_BLUE + f"{'Number of Features:':<30} " + C_GREEN + f"{n_features:<50}")
    print(C_BLUE + f"{'Number of Rectangles:':<30} " + C_GREEN + f"{n_rectangles:<50}")
    print(C_RESET)

def cascade_info_universal(path):
    xmldoc = minidom.parse(path)
    old_format = xmldoc.getElementsByTagName('stageNum').length == 0
    if old_format:
        print("Parsing old XML format..")
        cascade_info_old(path)
    else:
        print("Parsing new XML format..")
        cascade_info(path)

def cascade_info(path):
@@ -51,11 +67,7 @@ def cascade_info(path):
        rects = f.getElementsByTagName('_')
        n_rectangles = n_rectangles + len(rects)

    #print some cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print_cascade_info(path, size, stages, n_features, n_rectangles, True)

def cascade_info_old(path):
    #parse xml file
@@ -87,20 +99,14 @@ def cascade_info_old(path):
        rects = f.getElementsByTagName('_')
        n_rectangles = n_rectangles + len(rects)

    #print some cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print_cascade_info(path, size, stages, n_features, n_rectangles, False)

def cascade_binary_universal(path, n_stages, name):
    xmldoc = minidom.parse(path)
    old_format = xmldoc.getElementsByTagName('stageNum').length == 0
    if old_format:
        print("Converting old XML format..")
        cascade_binary_old(path, n_stages, name)
    else:
        print("Converting new XML format..")
        cascade_binary(path, n_stages, name)

def cascade_binary(path, n_stages, name):
@@ -202,12 +208,8 @@ def cascade_binary(path, n_stages, name):
            l = list(map(int, r.childNodes[0].nodeValue[:-1].split()))
            fout.write(struct.pack('BBBB', l[0], l[1], l[2], l[3])) #uint8_t

    # print cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print("binary cascade generated")

    print_cascade_info(path, size, stages, n_features, n_rectangles, True)


def cascade_binary_old(path, n_stages, name):
@@ -299,12 +301,8 @@ def cascade_binary_old(path, n_stages, name):
            l = list(map(int, r.childNodes[0].nodeValue[:-1].split()))
            fout.write(struct.pack('BBBB', l[0], l[1], l[2], l[3])) #uint8_t

    # print cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print("binary cascade generated")
    print_cascade_info(path, size, stages, n_features, n_rectangles, False)


def cascade_header(path, n_stages, name):
    #parse xml file
@@ -390,12 +388,8 @@ def cascade_header(path, n_stages, name):
    fout.write("const int8_t %s_rectangles_array[]={%s};\n"
        %(name, ", ".join(rect(f.getElementsByTagName('_')) for f in feature)))

    # print cascade info
    print("size:%dx%d"%(size[0], size[1]))
    print("stages:%d"%len(stages))
    print("features:%d"%n_features)
    print("rectangles:%d"%n_rectangles)
    print("C header cascade generated")
    print_cascade_info(path, size, stages, n_features, n_rectangles, False)


def main():
    # CMD args parser

@@ -17,16 +17,15 @@ import binascii
import subprocess


def run_vela(model_path, model_name, args):
    vela_dir = f'{args.build_dir}/{model_name}'
def vela_compile(model_path, build_dir, vela_args):
    vela_ini = os.path.dirname(os.path.abspath(__file__))
    vela_args = args.vela_args.split()
    model = os.path.basename(os.path.splitext(model_path)[0])

    # Construct the command
    command = [
        'vela',
        *vela_args,
        '--output-dir', vela_dir,
        '--output-dir', build_dir,
        '--config', f'{vela_ini}/vela.ini',
        model_path
    ]
@@ -36,36 +35,35 @@ def run_vela(model_path, model_name, args):
        result = subprocess.run(command, check=True, text=True, capture_output=True)
    except subprocess.CalledProcessError as e:
        print(e.stderr, file=sys.stderr)
        print(args.vela_args, file=sys.stderr)
        print(vela_args, file=sys.stderr)

    C_GREEN = '\033[92m'
    C_RED = '\033[91m'
    C_BLUE = '\033[94m'
    C_RESET = '\033[0m'

    csv_file_path = glob.glob(os.path.join(vela_dir, "*.csv"))[0]
    csv_file_path = glob.glob(os.path.join(build_dir, "*.csv"))[0]
    with open(csv_file_path, mode='r') as file:
        row = next(csv.DictReader(file))
        stoi = lambda x, d=1: str(int(float(x) / d))
        color = lambda c,x: c + x + C_RESET

        summary = {
            C_BLUE + "Network:": row["network"],
            C_BLUE + "Network:": C_BLUE + row["network"],
            C_BLUE + "Accelerator Configuration:": C_GREEN + row["accelerator_configuration"],
            C_BLUE + "System Configuration:": row["system_config"],
            C_BLUE + "Memory Mode:": row["memory_mode"],
            C_BLUE + "System Configuration:": C_BLUE + row["system_config"],
            C_BLUE + "Memory Mode:": C_BLUE + row["memory_mode"],
            C_BLUE + "Compiler Mode: ": C_RED + vela_args[-1],
            C_BLUE + "Accelerator Clock:": stoi(row["core_clock"], 10**6) + " MHz",
            C_BLUE + "Accelerator Clock:": C_BLUE + stoi(row["core_clock"], 10**6) + " MHz",
            C_BLUE + "SRAM Usage:": C_RED + stoi(row["sram_memory_used"]) + " KiB",
            C_BLUE + "Flash Usage:": C_RED + stoi(row["off_chip_flash_memory_used"]) + " KiB",
            C_BLUE + "Inference Time:": "%.2f ms, %.2f inferences/s"%
            C_BLUE + "Inference Time:": C_GREEN + "%.2f ms, %.2f inferences/s"%
                (float(row["inference_time"]) * 1000, float(row["inferences_per_second"])),
        }
        print("", file=sys.stderr)
        for key, value in summary.items():
            print(f"{key:<{35}} {value:<{50}}", file=sys.stderr)
        print(C_RESET, file=sys.stderr, end="")
        return f'{vela_dir}/{model_name}_vela.tflite'
        print(C_RESET, file=sys.stderr)
        os.rename(f"{build_dir}/{model}_vela.tflite", f"{build_dir}/{model}.tflite")


def main():
@@ -116,7 +114,8 @@ def main():
        else:
            args.vela_args += " --optimise " + tflm_builtin_index[model_name]["optimise"]
        # Compile the model using Vela and switch path to the new model.
        model_path = run_vela(model_path, model_name, args)
        vela_compile(model_path, args.build_dir, args.vela_args.split())
        model_path = os.path.join(args.build_dir, model_name + ".tflite")
        model_size = os.path.getsize(model_path)

        # Generate model labels.