Mirror of https://github.com/Refound-445/nonebot-plugin-nailongremove.git (synced 2025-11-04 21:22:43 +08:00)

Commit ba1b238e9e ("up")
Parent 0494b1301d
@@ -33,6 +33,7 @@ class Config(BaseModel):

     nailong_model1_yolox_size: Tuple[int, int] = (416, 416)
     nailong_model1_type: str = "tiny"
+    nailong_model1_score: float = 0.5

     nailong_github_token: Optional[str] = None
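Note: a minimal sketch (not part of this commit) of how the new threshold behaves. The field names and defaults are copied from the hunk above; the `NAILONG_MODEL1_SCORE` .env override name is an assumption based on NoneBot's usual env-to-config mapping.

from typing import Optional, Tuple

from pydantic import BaseModel


class Config(BaseModel):
    nailong_model1_yolox_size: Tuple[int, int] = (416, 416)
    nailong_model1_type: str = "tiny"
    nailong_model1_score: float = 0.5  # new detection confidence threshold
    nailong_github_token: Optional[str] = None


# pydantic coerces env-style string values into the declared float type
print(Config(nailong_model1_score="0.6").nailong_model1_score)  # 0.6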
@@ -32,16 +32,17 @@ if cuda_available:
     model.cuda()


+SIZE = 224
+
+
 def check_image(image: np.ndarray) -> "CheckResult":
-    if image.shape[0] < 224 or image.shape[1] < 224:
+    if image.shape[0] < SIZE or image.shape[1] < SIZE:
         return False
     image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
-    image = cv2.resize(image, (224, 224))
+    image = cv2.resize(image, (SIZE, SIZE))
     image = transform(image)
     image = image.unsqueeze(0)  # type: ignore
     with torch.no_grad():
         output = model(image.to(device))  # type: ignore
     _, pred = torch.max(output, 1)
-    if pred.item() == 1:
-        return True
-    return False
+    return pred.item() == 1
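Note: a self-contained sketch of the simplified classifier path, using a stand-in model and transform rather than the plugin's real weights, to show the SIZE gate and the collapsed `return pred.item() == 1`.

import cv2
import numpy as np
import torch
from torchvision import transforms

SIZE = 224
transform = transforms.ToTensor()  # assumption: the real transform likely also normalizes
model = torch.nn.Sequential(       # stand-in for the plugin's trained classifier
    torch.nn.Flatten(),
    torch.nn.Linear(3 * SIZE * SIZE, 2),
)
model.eval()


def check_image(image: np.ndarray) -> bool:
    if image.shape[0] < SIZE or image.shape[1] < SIZE:
        return False  # too small to classify reliably
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    image = cv2.resize(image, (SIZE, SIZE))
    tensor = transform(image).unsqueeze(0)
    with torch.no_grad():
        output = model(tensor)
    _, pred = torch.max(output, 1)
    return pred.item() == 1  # class 1 == target class in this sketch


print(check_image(np.zeros((300, 300, 3), dtype=np.uint8)))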
@@ -65,7 +65,14 @@ def get_latest_model() -> Path:

 model_path = get_latest_model()

-session = onnxruntime.InferenceSession(model_path)
+session = onnxruntime.InferenceSession(
+    model_path,
+    providers=[
+        "TensorrtExecutionProvider",
+        "CUDAExecutionProvider",
+        "CPUExecutionProvider",
+    ],
+)
 input_shape = config.nailong_model1_yolox_size
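Note: onnxruntime walks the providers list in order and falls back to the next entry when one is unavailable, so the same code still runs on CPU-only installs. A sketch of how to confirm which provider the session actually picked ("model.onnx" is a placeholder path):

import onnxruntime

session = onnxruntime.InferenceSession(
    "model.onnx",                     # placeholder for the downloaded YOLOX model
    providers=[
        "TensorrtExecutionProvider",  # used only when TensorRT is installed
        "CUDAExecutionProvider",      # needs onnxruntime-gpu plus CUDA/cuDNN
        "CPUExecutionProvider",       # always-available fallback
    ],
)
print(session.get_providers())        # providers actually selected, in priority order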
@@ -85,21 +92,27 @@ def check_image(image: np.ndarray) -> "CheckResult":
     boxes_xyxy[:, 3] = boxes[:, 1] + boxes[:, 3] / 2.0
     boxes_xyxy /= ratio
     dets = multiclass_nms(boxes_xyxy, scores, nms_thr=0.45, score_thr=0.1)
-    if dets is not None:
-        final_boxes, final_scores, final_cls_inds = (
-            dets[:, :4],  # type: ignore
-            dets[:, 4],  # type: ignore
-            dets[:, 5],  # type: ignore
+    if dets is None:
+        return False
+
+    final_boxes, final_scores, final_cls_inds = (
+        dets[:, :4],  # type: ignore
+        dets[:, 4],  # type: ignore
+        dets[:, 5],  # type: ignore
+    )
+    has = any(
+        True
+        for c, s in zip(final_cls_inds, final_scores)
+        if c == 1 and s >= config.nailong_model1_score
+    )
+    if has:
+        image = vis(
+            image,
+            final_boxes,
+            final_scores,
+            final_cls_inds,
+            conf=0.3,
+            class_names=COCO_CLASSES,
         )
-        for i in range(len(final_scores)):
-            if final_cls_inds[i] == 1 and final_scores[i] > 0.5:
-                image = vis(
-                    image,
-                    final_boxes,
-                    final_scores,
-                    final_cls_inds,
-                    conf=0.3,
-                    class_names=COCO_CLASSES,
-                )
-                return True, image
+        return True, image
     return False
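Note: the per-detection loop with the hard-coded 0.5 threshold is replaced by a single any(...) over class ids and scores, gated by the new config.nailong_model1_score. A tiny sketch with dummy values standing in for the multiclass_nms output:

import numpy as np

nailong_model1_score = 0.5                  # the new config default
final_cls_inds = np.array([0.0, 1.0, 1.0])  # per-detection class ids
final_scores = np.array([0.9, 0.3, 0.7])    # per-detection confidences

has = any(
    True
    for c, s in zip(final_cls_inds, final_scores)
    if c == 1 and s >= nailong_model1_score
)
print(has)  # True: the third detection is class 1 with score 0.7 >= 0.5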
@@ -1,4 +1,6 @@
-from typing import Callable, Optional, Tuple, Union
+import hashlib
+from pathlib import Path
+from typing import Any, Callable, Tuple, Union
 from typing_extensions import TypeAlias

 import torch
@@ -7,11 +9,12 @@ from nonebot import logger

 from .config import config

+ModelValidator: TypeAlias = Callable[[Path], Any]
 ModelVersionGetter: TypeAlias = Callable[
     [],
     Union[
         str,
-        Tuple[str, Optional[str]],
+        Tuple[str, ModelValidator],
     ],
 ]
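Note: with the new alias, a ModelVersionGetter may return either a bare version string or a (version, validator) tuple, and the validator is later run against the downloaded file. A hedged sketch with a stand-in getter and validator (not the plugin's real ones):

from pathlib import Path
from typing import Any, Callable, Tuple, Union

ModelValidator = Callable[[Path], Any]


def fake_validator(path: Path) -> None:
    if not path.exists():
        raise ValueError(f"{path} is missing")


def getter_with_validator() -> Tuple[str, ModelValidator]:
    return "abc1234", fake_validator


ver_ret: Union[str, Tuple[str, ModelValidator]] = getter_with_validator()
if isinstance(ver_ret, tuple):
    ver, validator = ver_ret
else:
    ver, validator = ver_ret, None
print(ver, validator)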
@@ -35,6 +38,21 @@ def format_github_repo_download_base_url(
     )


+def make_sha1_validator(expected: str):
+    def validator(file_path: Path):
+        sha = hashlib.sha1()  # noqa: S324
+        with file_path.open("rb") as f:
+            while True:
+                data = f.read(1048576)  # 1024 * 1024
+                if not data:
+                    break
+                sha.update(data)
+        if (got := sha.hexdigest()) != expected:
+            raise ValueError(f"Invalid SHA1, expected {expected}, got {got}")
+
+    return validator
+
+
 def make_github_repo_sha_getter(
     owner: str,
     repo: str,
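Note: a usage sketch for make_sha1_validator, with a condensed copy of the function (the chunked read above is collapsed to read_bytes purely for brevity):

import hashlib
import tempfile
from pathlib import Path


def make_sha1_validator(expected: str):
    def validator(file_path: Path):
        sha = hashlib.sha1()  # noqa: S324
        sha.update(file_path.read_bytes())
        if (got := sha.hexdigest()) != expected:
            raise ValueError(f"Invalid SHA1, expected {expected}, got {got}")

    return validator


with tempfile.TemporaryDirectory() as tmp:
    f = Path(tmp) / "model.onnx"
    f.write_bytes(b"dummy model bytes")
    good = hashlib.sha1(b"dummy model bytes").hexdigest()  # noqa: S324
    make_sha1_validator(good)(f)          # correct digest: passes silently
    try:
        make_sha1_validator("0" * 40)(f)  # wrong digest: raises ValueError
    except ValueError as e:
        print(e)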
@@ -42,7 +60,7 @@ def make_github_repo_sha_getter(
     folder: str,
     filename: str,
 ):
-    def getter() -> Tuple[str, str]:
+    def getter() -> str:
         github = get_github()
         ret = github.rest.git.get_tree(owner, repo, f"{branch}:{folder}")
         sha = next(
@@ -50,7 +68,7 @@ def make_github_repo_sha_getter(
             for x in ret.parsed_data.tree
             if x.path == filename and isinstance(x.sha, str)
         )
-        return sha[:7], sha
+        return sha[:7]

     return getter
@@ -96,6 +114,8 @@ def ensure_model(
         logger.info(f"Using model {model_filename} (version {local_ver or 'Unknown'})")
         return model_path

+    ver = None
+    validator = None
     try:
         ver_ret = model_version_getter()
     except Exception as e:
@@ -107,25 +127,23 @@ def ensure_model(
             logger.opt(exception=e).debug("Stacktrace")
         else:
             raise
-        ver = None
-        sha = None
     else:
         if isinstance(ver_ret, tuple):
-            ver, sha = ver_ret
+            ver, validator = ver_ret
         else:
             ver = ver_ret
-            sha = None

     def download():
         if not config.nailong_model_dir.exists():
             config.nailong_model_dir.mkdir(parents=True)
         url = f"{model_base_url}/{model_filename}"
-        torch.hub.download_url_to_file(
-            url,
-            str(model_path),
-            hash_prefix=sha,
-            progress=True,
-        )
+        torch.hub.download_url_to_file(url, str(model_path), progress=True)
+        if validator is not None:
+            try:
+                validator(model_path)
+            except Exception:
+                model_path.unlink(missing_ok=True)
+                raise

     if ver is None:
         logger.warning("Skip update.")
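Note: download() now swaps torch.hub's hash_prefix check for the explicit validator and deletes the file when validation fails, so a corrupt download is never picked up on the next start. A minimal sketch of that failure path, with a stand-in writer and a validator that always rejects:

import tempfile
from pathlib import Path


def reject_everything(path: Path) -> None:  # stand-in for a failing SHA1 validator
    raise ValueError("checksum mismatch")


with tempfile.TemporaryDirectory() as tmp:
    model_path = Path(tmp) / "nailong.onnx"
    model_path.write_bytes(b"downloaded bytes")  # stands in for torch.hub.download_url_to_file
    try:
        try:
            reject_everything(model_path)
        except Exception:
            model_path.unlink(missing_ok=True)   # drop the bad file before re-raising
            raise
    except ValueError:
        pass  # swallowed here only to keep the demo running
    print(model_path.exists())  # False: the rejected download was deleted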
pdm.lock (generated, 26 lines changed)
@@ -5,7 +5,7 @@
 groups = ["default", "dev", "model1"]
 strategy = ["inherit_metadata"]
 lock_version = "4.5.0"
-content_hash = "sha256:76280489ad4609f78fb53666b7f7e47981abf61e1feb111f76e1f7217e56d80c"
+content_hash = "sha256:8d60e95796a305224579cea25022c1b1ea6532b2f573ef03af8f9cd57f089399"

 [[metadata.targets]]
 requires_python = "~=3.9"
@@ -1561,6 +1561,30 @@ files = [
     {file = "onnxruntime-1.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:38475e29a95c5f6c62c2c603d69fc7d4c6ccbf4df602bd567b86ae1138881c49"},
 ]

+[[package]]
+name = "onnxruntime-gpu"
+version = "1.19.2"
+summary = "ONNX Runtime is a runtime accelerator for Machine Learning models"
+groups = ["model1"]
+dependencies = [
+    "coloredlogs",
+    "flatbuffers",
+    "numpy>=1.21.6",
+    "packaging",
+    "protobuf",
+    "sympy",
+]
+files = [
+    {file = "onnxruntime_gpu-1.19.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a49740e079e7c5215830d30cde3df792e903df007aa0b0fd7aa797937061b27a"},
+    {file = "onnxruntime_gpu-1.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:b895920bb5e4241299f68874e0becdc2635ea0142939c11e7ff5ae5b28993613"},
+    {file = "onnxruntime_gpu-1.19.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:562fc7c755393eaad9751e56149339dd201ffbfdb3ef5f43ff21d0619ba9045f"},
+    {file = "onnxruntime_gpu-1.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:522f7495918176cb8c1a3c78bde7152d984f7096acc786c73a27643af8af87c9"},
+    {file = "onnxruntime_gpu-1.19.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:554a02a3fac0119707eb87327908afd21c4e6f0fa5bf9a034398f098adc316c5"},
+    {file = "onnxruntime_gpu-1.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:e7c6165a405027e3c0f11d189ae7013b5d66919b3381f9bfb3405c0c0cf07968"},
+    {file = "onnxruntime_gpu-1.19.2-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9e369f01f55ea726ae5d28f18236426e52e97c433f0b7682054e61c478a06c9"},
+    {file = "onnxruntime_gpu-1.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:c8b8128174b0470537e9f4983aeecc002a435d13914970c2af2f41d244ef2781"},
+]
+
 [[package]]
 name = "opencv-python"
 version = "4.10.0.84"
@@ -31,7 +31,7 @@ homepage = "https://github.com/Refound-445/nonebot-plugin-nailongremove"
 repository = "https://github.com/Refound-445/nonebot-plugin-nailongremove"

 [project.optional-dependencies]
-model1 = ["onnxruntime>=1.19.2"]
+model1 = ["onnxruntime>=1.19.2", "onnxruntime-gpu>=1.19.2"]

 [build-system]
 requires = ["pdm-backend"]
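Note: with onnxruntime-gpu added to the model1 extra (installed via pip install "nonebot-plugin-nailongremove[model1]"), the quickest runtime check that the GPU wheel is actually in effect is to list the available providers:

import onnxruntime

# "CUDAExecutionProvider" appears only when the onnxruntime-gpu wheel is
# installed together with a working CUDA setup; CPU-only installs keep working
# because the session falls back to "CPUExecutionProvider".
print(onnxruntime.get_available_providers())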