mirror of https://github.com/Refound-445/nonebot-plugin-nailongremove.git
synced 2025-11-04 21:22:43 +08:00

Commit 2a5872b2c4
Merge remote-tracking branch 'origin/main'
.idea/.gitignore (generated, vendored, 8 lines deleted)
@@ -1,8 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
-# Editor-based HTTP Client requests
-/httpRequests/
-# Datasource local storage ignored files
-/dataSources/
-/dataSources.local.xml
.idea/inspectionProfiles/profiles_settings.xml (generated, 6 lines deleted)
@@ -1,6 +0,0 @@
-<component name="InspectionProjectProfileManager">
-  <settings>
-    <option name="USE_PROJECT_PROFILE" value="false" />
-    <version value="1.0" />
-  </settings>
-</component>
.idea/misc.xml (generated, 4 lines deleted)
@@ -1,4 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12" project-jdk-type="Python SDK" />
-</project>
.idea/modules.xml (generated, 8 lines deleted)
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="ProjectModuleManager">
-    <modules>
-      <module fileurl="file://$PROJECT_DIR$/.idea/nonebot-plugin-nailongremove.iml" filepath="$PROJECT_DIR$/.idea/nonebot-plugin-nailongremove.iml" />
-    </modules>
-  </component>
-</project>
.idea/nonebot-plugin-nailongremove.iml (generated, 8 lines deleted)
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="PYTHON_MODULE" version="4">
-  <component name="NewModuleRootManager">
-    <content url="file://$MODULE_DIR$" />
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-  </component>
-</module>
.idea/vcs.xml (generated, 6 lines deleted)
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="VcsDirectoryMappings">
-    <mapping directory="" vcs="Git" />
-  </component>
-</project>
@@ -105,7 +105,7 @@ async def handle_function(bot: BaseBot, ev: BaseEvent, msg: UniMsg, session: Uni
             frames.append(temp_image)
         except StopIteration:
             break
-    commitInfo = process_gif_and_save_jpgs(frames, label, (224,224))
+    commitInfo = process_gif_and_save_jpgs(frames, label, (224, 224))
     if commitInfo is None:
         await nailong.finish(
             f"The new data has been saved to the directory {config.nailong_model_dir}\\records\\{label}, label: {label}.",
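Context note: the surrounding handler drains a frame iterator with an explicit next()/StopIteration loop before handing the collected frames to process_gif_and_save_jpgs. A minimal standalone sketch of that collection pattern (the synthetic numpy frames below are placeholders, not the plugin's real frame source):

```python
# Minimal sketch of the frame-collection pattern above; the synthetic numpy
# frames stand in for the plugin's real frame source.
import numpy as np

frame_iter = iter([np.zeros((224, 224, 3), dtype=np.uint8) for _ in range(3)])

frames = []
while True:
    try:
        frames.append(next(frame_iter))
    except StopIteration:
        break

# list(frame_iter) or a plain for-loop is equivalent, since both stop on StopIteration.
assert len(frames) == 3
```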
@@ -137,7 +137,9 @@ async def handle_function(bot: BaseBot, ev: BaseEvent, msg: UniMsg, session: Uni
         ]
         if len(template_str_all) == 0:
             continue
-        template_str=template_str_all[random.randint(0, len(template_str_all) - 1)]
+        template_str = template_str_all[
+            random.randint(0, len(template_str_all) - 1)
+        ]
         mapping = {
             "$event": ev,
             "$target": msg.get_target(),
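Note: this hunk only re-wraps the random template lookup; it still picks one reply template uniformly at random. A small sketch showing the indexing used above next to the equivalent random.choice (the template strings are placeholders):

```python
# Sketch: both lines pick one template uniformly at random;
# random.choice is the direct spelling of the indexing used above.
import random

template_str_all = ["template-a", "template-b", "template-c"]  # placeholder templates

template_str = template_str_all[random.randint(0, len(template_str_all) - 1)]
template_str = random.choice(template_str_all)
```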
@@ -38,7 +38,6 @@ else:
     file_path = os.path.join(str(config.nailong_model_dir), FILENAME)
     model_info = api.model_info(REPO_ID)
 
-
     def get_file_last_modified_time(file_path):
         try:
             timestamp = os.path.getmtime(file_path)
@@ -50,7 +49,6 @@ else:
         except FileNotFoundError:
             return None
 
-
     local_time = get_file_last_modified_time(file_path)
     if local_time is None or model_info.last_modified >= local_time:
         hf_hub_download(
@@ -78,7 +76,7 @@ def _check_single(frame: np.ndarray, is_gif: bool = False) -> CheckSingleResult:
     input_image = Image.fromarray(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
 
     if not os.path.exists(
         os.path.join(str(config.nailong_model_dir), "online_temp"),
     ):
         os.makedirs(os.path.join(str(config.nailong_model_dir), "online_temp"))
     image_path = os.path.join(
@@ -102,8 +100,8 @@ def _check_single(frame: np.ndarray, is_gif: bool = False) -> CheckSingleResult:
     )
     os.remove(image_path)
     if (
         "检测到的目标数量: " in result_info
         and int(result_info.split("检测到的目标数量: ")[1].split("\n")[0]) < 1
     ):
         return CheckSingleResult(ok=False, label=None, extra=frame)
     if isinstance(result_image, str):
@@ -144,9 +142,9 @@ def _check_single(frame: np.ndarray, is_gif: bool = False) -> CheckSingleResult:
 
     if pad_w > 0 or pad_h > 0:
         result_img = result_img[
-            pad_h // 2: pad_h // 2 + original_size[1],
-            pad_w // 2: pad_w // 2 + original_size[0],
+            pad_h // 2 : pad_h // 2 + original_size[1],
+            pad_w // 2 : pad_w // 2 + original_size[0],
         ]
     return CheckSingleResult(ok=True, label="nailong", extra=result_img)
 
 
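Note on the hunks above: the model file is only re-downloaded when the local copy is missing or older than what api.model_info reports for the repo. A rough standalone sketch of that freshness check, assuming placeholder REPO_ID/FILENAME values and huggingface_hub installed:

```python
# Rough sketch of the freshness check above: download only when the local file is
# missing or older than the copy on the Hub. REPO_ID and FILENAME are placeholders.
import datetime
import os

from huggingface_hub import HfApi, hf_hub_download

REPO_ID = "some-user/some-model"  # placeholder
FILENAME = "model.pth"            # placeholder
local_path = os.path.join("models", FILENAME)


def get_file_last_modified_time(path):
    try:
        ts = os.path.getmtime(path)
        return datetime.datetime.fromtimestamp(ts, tz=datetime.timezone.utc)
    except FileNotFoundError:
        return None


model_info = HfApi().model_info(REPO_ID)  # remote metadata, including last_modified
local_time = get_file_last_modified_time(local_path)
if local_time is None or model_info.last_modified >= local_time:
    hf_hub_download(repo_id=REPO_ID, filename=FILENAME, local_dir="models")
```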
@@ -83,8 +83,8 @@ class FrameInfo:
 
 @run_sync
 def _check_single(
     frame: np.ndarray,
     is_gif: bool = False,
 ) -> CheckSingleResult[Optional[Detections]]:
     if is_gif:
         res = similarity_process(frame)
@@ -127,8 +127,8 @@ def _check_single(
 
 
 async def check_single(
     frame: np.ndarray,
     is_gif: bool = False,
 ) -> CheckSingleResult[FrameInfo]:
     if is_gif:
         res = await _check_single(frame, True)
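Note: check_single routes GIF frames through similarity_process and otherwise awaits the synchronous _check_single wrapped for async use. A rough sketch of that shape, using asyncio.to_thread as a stand-in for the plugin's run_sync wrapper (the check function below is a trivial placeholder):

```python
# Rough sketch of the dispatch shape above. asyncio.to_thread stands in for the
# plugin's run_sync wrapper; the check function is a trivial placeholder.
import asyncio

import numpy as np


def _check_single_sync(frame: np.ndarray, is_gif: bool = False) -> bool:
    # placeholder for the real detector / similarity lookup
    return bool(frame.mean() > 0)


async def check_single(frame: np.ndarray, is_gif: bool = False) -> bool:
    # run the blocking check off the event loop so the bot stays responsive
    return await asyncio.to_thread(_check_single_sync, frame, is_gif)


asyncio.run(check_single(np.ones((4, 4), dtype=np.uint8), is_gif=True))
```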
@@ -7,13 +7,12 @@ import random
 import shutil
 from dataclasses import dataclass, field
 from typing import Any, Awaitable, Callable, Dict, Generic, Optional, TypeVar
 
 from typing_extensions import TypeAlias
 
 import cv2
 import numpy as np
 import torch
-import torch.nn.functional as F
 from ...config import config
 from ...frame_source import FrameSource
 
@@ -22,20 +21,24 @@ T = TypeVar("T")
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 if config.nailong_similarity_on:
-    from huggingface_hub import PyTorchModelHubMixin
-    from torch import nn
-    import torchvision
-    from nonebot import logger
-    import faiss
     import json
-    import sklearn
+    import faiss
+    import torchvision
+    from huggingface_hub import PyTorchModelHubMixin
+    from nonebot import logger
+    from torch import nn
     from torchvision import transforms
 
-    transform = transforms.Compose([
-        transforms.ToTensor(),
-        transforms.Normalize(mean=[0.5], std=[0.5]) # Assuming grayscale or single-channel
-    ])
+    transform = transforms.Compose(
+        [
+            transforms.ToTensor(),
+            transforms.Normalize(
+                mean=[0.5],
+                std=[0.5],
+            ),  # Assuming grayscale or single-channel
+        ],
+    )
 
     class MyModel(
         nn.Module,
@@ -44,21 +47,25 @@ if config.nailong_similarity_on:
         def __init__(self):
             super().__init__()
             self.resnet = torchvision.models.resnet18(pretrained=False)
-            self.resnet.fc = nn.Linear(self.resnet.fc.in_features, 5)  # Output dimension is 5
+            self.resnet.fc = nn.Linear(
+                self.resnet.fc.in_features,
+                5,
+            )  # Output dimension is 5
 
         def forward(self, x):
             return self.resnet(x)
 
-    features_model = MyModel.from_pretrained("refoundd/NailongFeatures", ).to(device)
-    index_path = config.nailong_model_dir / 'records.index'
-    json_path = config.nailong_model_dir / 'records.json'
+    features_model = MyModel.from_pretrained(
+        "refoundd/NailongFeatures",
+    ).to(device)
+    index_path = config.nailong_model_dir / "records.index"
+    json_path = config.nailong_model_dir / "records.json"
     if os.path.exists(index_path):
         index = faiss.read_index(str(index_path))
     else:
         index = faiss.IndexFlatL2(512)
     if os.path.exists(json_path):
-        with open(json_path, 'r') as f:
+        with open(json_path, "r") as f:
             index_cls = json.load(f)
     else:
         index_cls = {}
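Note: the block above loads or creates a 512-dimensional faiss IndexFlatL2 plus a JSON map from row number to label. A minimal sketch of adding to and querying such an index, with random placeholder vectors:

```python
# Minimal sketch of the faiss usage set up above: a 512-d L2 index plus a dict
# mapping row number -> label. Vectors here are random placeholders.
import faiss
import numpy as np

dim = 512
index = faiss.IndexFlatL2(dim)
index_cls = {}

for label in ("nailong", "other"):
    vec = np.random.rand(1, dim).astype("float32")
    index.add(vec)
    index_cls[str(index.ntotal - 1)] = label

query = np.random.rand(1, dim).astype("float32")
distances, ids = index.search(query, 1)  # nearest stored vector
print(index_cls[str(ids[0][0])], distances[0][0])
```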
@@ -66,9 +73,10 @@ if config.nailong_similarity_on:
     try:
         res = faiss.StandardGpuResources()  # create GPU resources
         index = faiss.index_cpu_to_gpu(res, 0, index)  # move the CPU index to the GPU
-    except Exception as e:
-        logger.warning("load faiss-gpu failed.Please check your GPU device and install faiss-gpu first.")
+    except Exception:
+        logger.warning(
+            "load faiss-gpu failed.Please check your GPU device and install faiss-gpu first.",
+        )
 
     def hook(model, input, output):
         embeddings = input[0]
@@ -78,7 +86,6 @@ if config.nailong_similarity_on:
         d, i = index.search(vector, 1)
         return 1 - d[0][0], i[0][0], vector
 
-
     features_model.resnet.fc.register_forward_hook(hook)
     features_model.eval()
 
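Note: the hook above captures the input of resnet.fc, i.e. the 512-dimensional embedding feeding the classifier head, and that vector is what gets searched in the faiss index. A small illustrative sketch of the same forward-hook trick on a plain resnet18 (untrained weights, synthetic input):

```python
# Sketch of the forward-hook trick above: grab the 512-d features entering
# resnet.fc on every forward pass. Untrained weights, purely illustrative.
import torch
import torchvision

model = torchvision.models.resnet18(weights=None)
captured = {}


def hook(module, inputs, output):
    captured["embedding"] = inputs[0].detach()  # (batch, 512) tensor entering fc


model.fc.register_forward_hook(hook)
model.eval()

with torch.no_grad():
    model(torch.randn(1, 3, 224, 224))

print(captured["embedding"].shape)  # torch.Size([1, 512])
```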
@@ -112,9 +119,9 @@ FrameChecker: TypeAlias = Callable[
 
 
 async def race_check(
     checker: FrameChecker[T],
     frames: FrameSource,
     concurrency: int = config.nailong_concurrency,
 ) -> Optional[CheckSingleResult[T]]:
     iterator = iter(frames)
     if config.nailong_similarity_on:
@@ -182,7 +189,11 @@ async def race_check(
     return None
 
 
-def similarity_process(image1: np.ndarray, dsize=(224, 224), similarity_threshold=1) -> Optional[CheckSingleResult]:
+def similarity_process(
+    image1: np.ndarray,
+    dsize=(224, 224),
+    similarity_threshold=1,
+) -> Optional[CheckSingleResult]:
     # image1 = cv2.cvtColor(image1, cv2.COLOR_BGR2RGB)
     image1 = cv2.resize(image1, dsize, interpolation=cv2.INTER_LINEAR)
     image1_tensor = transform(image1).unsqueeze(0).to(device)
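Note: similarity_process resizes the frame to dsize, runs it through the transforms.Compose pipeline, adds a batch dimension and moves it to the compute device. A standalone sketch of that preprocessing with a synthetic frame:

```python
# Sketch of the preprocessing in similarity_process: resize, ToTensor + Normalize,
# add a batch dimension, move to the compute device. The input frame is synthetic.
import cv2
import numpy as np
import torch
from torchvision import transforms

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

transform = transforms.Compose(
    [
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.5], std=[0.5]),
    ],
)

frame = np.random.randint(0, 255, (480, 640, 3), dtype=np.uint8)  # placeholder frame
resized = cv2.resize(frame, (224, 224), interpolation=cv2.INTER_LINEAR)
batch = transform(resized).unsqueeze(0).to(device)  # shape: (1, 3, 224, 224)
```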
@@ -198,22 +209,25 @@ def similarity_process(image1: np.ndarray, dsize=(224, 224), similarity_threshol
 
 
 def process_gif_and_save_jpgs(frames, label, dsize=(224, 224), similarity_threshold=1):
     if (
         len(
             list(
-                glob.glob(
-                    str(config.nailong_model_dir / "records/*/*.jpg")
-                ),
+                glob.glob(str(config.nailong_model_dir / "records/*/*.jpg")),
             ),
         )
-        >= config.nailong_similarity_max_storage and config.nailong_hf_token is not None
+        >= config.nailong_similarity_max_storage
+        and config.nailong_hf_token is not None
     ):
         zip_filename = shutil.make_archive(
-            config.nailong_model_dir / "{}_records".format(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")),
+            config.nailong_model_dir
+            / "{}_records".format(
+                datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"),
+            ),
             "zip",
-            config.nailong_model_dir / "records"
+            config.nailong_model_dir / "records",
         )
         shutil.rmtree(config.nailong_model_dir / "records")
         from huggingface_hub import HfApi
 
         api = HfApi()
         commitInfo = api.upload_file(
             path_or_fileobj=zip_filename,
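Note: when the stored records exceed the configured cap and a Hugging Face token is set, the records directory is zipped with shutil.make_archive and pushed with HfApi.upload_file, whose return value is the commitInfo used above. A rough sketch with placeholder repo id, token, and paths (the upload itself needs a real token and repo to succeed):

```python
# Sketch of the archive-and-upload step above. Repo id, token, and paths are
# placeholders; upload_file returns a CommitInfo, mirroring commitInfo above.
import datetime
import shutil
from pathlib import Path

from huggingface_hub import HfApi

records_dir = Path("models/records")  # placeholder path
records_dir.mkdir(parents=True, exist_ok=True)

stamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
zip_filename = shutil.make_archive(
    str(records_dir.parent / f"{stamp}_records"),
    "zip",
    records_dir,
)

api = HfApi()
commit_info = api.upload_file(
    path_or_fileobj=zip_filename,
    path_in_repo=Path(zip_filename).name,
    repo_id="some-user/some-dataset",  # placeholder
    repo_type="dataset",
    token="hf_xxx",                    # placeholder
)
```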
@@ -255,6 +269,6 @@ def process_gif_and_save_jpgs(frames, label, dsize=(224, 224), similarity_thresh
             index_cls[str(index.ntotal - 1)] = label
             count += 1
     faiss.write_index(index, str(index_path))
-    with open(json_path, 'w') as f:
+    with open(json_path, "w") as f:
         json.dump(index_cls, f)
     return commitInfo
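Note: the final hunk persists the faiss index and the row-to-label map side by side so they can be reloaded on the next start, as in the earlier index/JSON loading block. A short round-trip sketch with placeholder paths:

```python
# Round-trip sketch of the persistence shown above: write the faiss index and the
# label map together, then load them back. Paths are placeholders.
import json
from pathlib import Path

import faiss
import numpy as np

index_path = Path("records.index")
json_path = Path("records.json")

index = faiss.IndexFlatL2(512)
index.add(np.random.rand(2, 512).astype("float32"))
index_cls = {"0": "nailong", "1": "other"}

faiss.write_index(index, str(index_path))
with open(json_path, "w") as f:
    json.dump(index_cls, f)

index = faiss.read_index(str(index_path))
with open(json_path) as f:
    index_cls = json.load(f)
```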