Mirror of https://github.com/Refound-445/nonebot-plugin-nailongremove.git (synced 2025-11-04 21:22:43 +08:00)

Commit 8ff3a8054d ("up")
Parent: ba1b238e9e
@@ -12,6 +2,8 @@ class ModelType(int, Enum):
class Config(BaseModel):
    proxy: Optional[str] = None

    nailong_bypass_superuser: bool = True
    nailong_bypass_admin: bool = True
    nailong_need_admin: bool = False
@@ -34,6 +36,7 @@ class Config(BaseModel):
    nailong_model1_yolox_size: Tuple[int, int] = (416, 416)
    nailong_model1_type: str = "tiny"
    nailong_model1_score: float = 0.5
    nailong_model1_try_to_use_gpu: bool = True

    nailong_github_token: Optional[str] = None
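Note: these are ordinary nonebot plugin settings, so the new proxy and nailong_github_token options can be supplied through the bot's .env file and read back with nonebot's standard plugin-config loader. A minimal sketch, with placeholder values, assuming a recent nonebot2 that provides get_plugin_config:

# .env (illustrative values only)
#   PROXY=http://127.0.0.1:7890
#   NAILONG_GITHUB_TOKEN=ghp_xxxxxxxx
#   NAILONG_MODEL1_TRY_TO_USE_GPU=false
from nonebot import get_plugin_config

from nonebot_plugin_nailongremove.config import Config

config = get_plugin_config(Config)  # validated against the Config model above
print(config.proxy, config.nailong_github_token)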
@@ -6,18 +6,17 @@ import torch
from torch import nn
from torchvision import transforms

from ..utils import ensure_model_from_github_repo
from .update import GitHubRepoModelUpdater

if TYPE_CHECKING:
    from . import CheckResult

model_path = ensure_model_from_github_repo(
model_path = GitHubRepoModelUpdater(
    "spawner1145",
    "NailongRecognize",
    "main",
    "",
    "nailong.pth",
)
).get()

cuda_available = torch.cuda.is_available()
device = torch.device("cuda" if cuda_available else "cpu")
@@ -31,7 +30,6 @@ model.eval()
if cuda_available:
    model.cuda()

SIZE = 224
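Note: the hunk above swaps the old download helper for the new updater class, and the diff shows the removed and added call sites back to back. Spelled out as before/after, it looks roughly like the sketch below; the four-argument form assumes the old empty "folder" segment is simply folded into GitHubRepoModelUpdater's single path argument, per the class defined later in this commit.

# Before: helper from the removed utils module (owner, repo, branch, folder, filename).
model_path = ensure_model_from_github_repo(
    "spawner1145", "NailongRecognize", "main", "", "nailong.pth",
)

# After: updater class from the new model/update.py (owner, repo, branch, path).
model_path = GitHubRepoModelUpdater(
    "spawner1145", "NailongRecognize", "main", "nailong.pth",
).get()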
@@ -1,77 +1,40 @@
from contextlib import suppress
from pathlib import Path
from typing import TYPE_CHECKING

import numpy as np
import onnxruntime
from nonebot import logger
from yarl import URL

from ..config import config
from ..utils import TIME_FORMAT_TEMPLATE, ensure_model, get_github, get_ver_filename
from .update import GitHubLatestReleaseModelUpdater
from .yolox_utils import demo_postprocess, multiclass_nms, preprocess, vis

if TYPE_CHECKING:
    from . import CheckResult

COCO_CLASSES = ("_background_", "nailong", "anime", "human", "emoji", "long", "other")

model_path = GitHubLatestReleaseModelUpdater(
    "nkxingxh",
    "NailongDetection",
    lambda x: x.endswith(f"_{config.nailong_model1_type}.onnx"),
).get()

model_filename_sfx = f"_{config.nailong_model1_type}.onnx"


def find_local_model() -> Path:
    local_models = [
        x
        for x in config.nailong_model_dir.iterdir()
        if x.name.endswith(model_filename_sfx)
    ]
    local_models.sort(key=lambda x: x.stat().st_mtime, reverse=True)
    if not local_models:
        raise FileNotFoundError("No local model found")
    p = local_models[0]
    version = (
        vp.read_text("u8")
        if (vp := (p.parent / get_ver_filename(p.name))).exists()
        else "Unknown"
    )
    logger.info(f"Using local cached model: {p.name} (version {version})")
    return p


def get_latest_model() -> Path:
    if not config.nailong_auto_update_model:
        with suppress(FileNotFoundError):
            return find_local_model()

    github = get_github()
    try:
        ret = github.rest.repos.get_latest_release("nkxingxh", "NailongDetection")
        asset = next(
            x for x in ret.parsed_data.assets if x.name.endswith(model_filename_sfx)
        )
        url = URL(asset.browser_download_url)
        version = asset.updated_at.strftime(TIME_FORMAT_TEMPLATE)
        return ensure_model(
            str(url.parent),
            url.name,
            lambda: version,
        )
    except Exception as e:
        logger.warning(f"Failed to get latest model: {type(e).__name__}: {e}")
        logger.opt(exception=e).debug("Stacktrace")
        return find_local_model()


model_path = get_latest_model()
labels_path = GitHubLatestReleaseModelUpdater(
    "nkxingxh",
    "NailongDetection",
    lambda x: x == "labels.txt",
).get()
labels = labels_path.read_text("u8").splitlines()

session = onnxruntime.InferenceSession(
    model_path,
    providers=[
        "TensorrtExecutionProvider",
        "CUDAExecutionProvider",
        "CPUExecutionProvider",
    ],
    providers=(
        [
            "TensorrtExecutionProvider",
            "CUDAExecutionProvider",
            "CPUExecutionProvider",
        ]
        if config.nailong_model1_try_to_use_gpu
        else ["CPUExecutionProvider"]
    ),
)
input_shape = config.nailong_model1_yolox_size

@@ -112,7 +75,7 @@ def check_image(image: np.ndarray) -> "CheckResult":
            final_scores,
            final_cls_inds,
            conf=0.3,
            class_names=COCO_CLASSES,
            class_names=labels,
        )
        return True, image
    return False
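Note: the InferenceSession call above now gates the provider list on nailong_model1_try_to_use_gpu. onnxruntime can only use providers compiled into the installed build; requesting TensorRT/CUDA on a CPU-only wheel typically just logs a warning and falls back, so the effective selection can be checked at runtime. A small sketch using the standard onnxruntime API ("model.onnx" is a placeholder path):

import onnxruntime

# Execution providers available in the installed onnxruntime package.
print(onnxruntime.get_available_providers())

# Providers the session actually ended up with after filtering the request.
sess = onnxruntime.InferenceSession("model.onnx", providers=["CPUExecutionProvider"])
print(sess.get_providers())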
nonebot_plugin_nailongremove/model/update.py (new file, 267 lines)
@@ -0,0 +1,267 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Callable, Generic, Optional, TypeVar
from typing_extensions import override

import httpx
from githubkit import GitHub
from nonebot import logger
from tqdm import tqdm

from ..config import config

T = TypeVar("T")

TIME_FORMAT_TEMPLATE = "%Y-%m-%d_%H-%M-%S"


class ProxyGitHub(GitHub):
    @override
    def _get_client_defaults(self):
        return {**super()._get_client_defaults(), "proxy": config.proxy}


def get_github():
    return ProxyGitHub(config.nailong_github_token, auto_retry=False)


def get_ver_filename(filename: str) -> str:
    return f"{filename}.ver.txt"


def get_ver_path(filename: str) -> Path:
    return config.nailong_model_dir / get_ver_filename(filename)


def progress_download(resp: httpx.Response, file_path: Path):
    if not (file_dir := file_path.parent).exists():
        file_dir.mkdir(parents=True)
    prog = tqdm(unit="B", unit_scale=True, unit_divisor=1024)
    with file_path.open("wb") as f, prog:  # fmt: skip
        for b in resp.iter_bytes():
            if (prog.total is None) and (c_len := resp.headers.get("Content-Length")):
                prog.total = int(c_len)
            prog.update(len(b))
            f.write(b)


def httpx_progress_download(file_path: Path, *args, **kwargs):
    with httpx.stream(*args, **kwargs) as resp:
        return progress_download(resp.raise_for_status(), file_path)


def github_progress_download(github: GitHub, file_path: Path, *args, **kwargs):
    with github.get_sync_client() as cli, cli.stream(*args, **kwargs) as resp:
        return progress_download(resp.raise_for_status(), file_path)


@dataclass
class ModelInfo(Generic[T]):
    download_url: str
    filename: str
    version: str
    extra: T


class ModelUpdater(ABC):
    @abstractmethod
    def find_from_local(self) -> Optional[Path]: ...

    @abstractmethod
    def get_info(self) -> ModelInfo: ...

    def check_local_ver(self, info: ModelInfo) -> Optional[str]:
        if (config.nailong_model_dir / info.filename).exists() and (
            ver_path := get_ver_path(info.filename)
        ).exists():
            return ver_path.read_text(encoding="u8").strip()
        return None

    def save_local_ver(self, info: ModelInfo, clear: bool = False):
        p = get_ver_path(info.filename)
        if clear:
            p.unlink(missing_ok=True)
        else:
            p.write_text(info.version, encoding="u8")

    def _download(self, tmp_path: Path, info: ModelInfo):
        httpx_progress_download(
            tmp_path,
            "GET",
            info.download_url,
            proxy=config.proxy,
            follow_redirects=True,
        )

    def download(self, info: ModelInfo):
        tmp_path = config.nailong_model_dir / f"{info.filename}.{info.version}"
        try:
            self._download(tmp_path, info)
        except Exception:
            tmp_path.unlink(missing_ok=True)
            raise
        else:
            tmp_path.rename(config.nailong_model_dir / info.filename)

    def validate(self, path: Path, info: ModelInfo) -> Any:  # noqa: ARG002
        """please raise Error when validation failed"""
        return

    def validate_with_unlink(self, path: Path, info: ModelInfo) -> Any:
        try:
            return self.validate(path, info)
        except Exception:
            path.unlink(missing_ok=True)
            self.save_local_ver(info, clear=True)
            raise

    def _get(self) -> Path:
        if (not config.nailong_auto_update_model) and (local := self.find_from_local()):
            return local

        try:
            info = self.get_info()
        except Exception as e:
            if not (local := self.find_from_local()):
                raise
            logger.error(
                f"Failed to get model info for {type(self).__name__}, skipping update: "
                f"{type(e).__name__}: {e}",
            )
            logger.debug("Stacktrace")
            return local

        model_path = config.nailong_model_dir / info.filename
        local_ver = self.check_local_ver(info)

        if model_path.exists():
            try:
                self.validate_with_unlink(model_path, info)
            except Exception as e:
                logger.error(
                    f"Validation for model {info.filename} failed, re-downloading: "
                    f"{type(e).__name__}: {e}",
                )

        if (not model_path.exists()) or (local_ver != info.version):
            from_tip = f"from version {local_ver or 'Unknown'} " if local_ver else ""
            logger.info(
                f"Updating model {info.filename} {from_tip}to version {info.version}",
            )
            self.download(info)
            self.save_local_ver(info)
            self.validate_with_unlink(model_path, info)

        return model_path

    def get(self):
        p = self._get()
        logger.info(f"Using model {p.name}")
        return p


class GitHubModelUpdater(ModelUpdater):
    def __init__(self) -> None:
        super().__init__()
        self.github = get_github()


class GitHubDownloadModelUpdater(GitHubModelUpdater):
    @override
    def _download(self, tmp_path: Path, info: ModelInfo):
        github_progress_download(
            self.github,
            tmp_path,
            "GET",
            info.download_url,
            follow_redirects=True,
        )


class GitHubRepoModelUpdater(GitHubModelUpdater):
    def __init__(self, owner: str, repo: str, branch: str, path: str) -> None:
        super().__init__()
        self.owner = owner
        self.repo = repo
        self.branch = branch
        self.path = path

        p = self.path.rsplit("/", 1)
        self.folder = p[0] if len(p) > 1 else ""
        self.filename = p[-1]

    def format_download_url(self) -> str:
        folder = f"{self.folder}/" if self.folder else ""
        return (
            f"https://github.com/{self.owner}/{self.repo}"
            f"/raw/refs/heads/{self.branch}/{folder}{self.filename}"
        )

    @override
    def find_from_local(self) -> Optional[Path]:
        if (p := (config.nailong_model_dir / self.filename)).exists():
            return p
        return None

    @override
    def get_info(self) -> ModelInfo[None]:
        ret = self.github.rest.git.get_tree(
            self.owner,
            self.repo,
            f"{self.branch}:{self.folder}",
        )
        it = next(
            x
            for x in ret.parsed_data.tree
            if x.path == self.filename and isinstance(x.sha, str)
        )
        assert it.sha
        return ModelInfo(
            self.format_download_url(),
            self.filename,
            it.sha[:7],
            None,
        )


class GitHubLatestReleaseModelUpdater(GitHubModelUpdater):
    def __init__(
        self,
        owner: str,
        repo: str,
        filename_checker: Optional[Callable[[str], bool]] = None,
    ) -> None:
        super().__init__()
        self.owner = owner
        self.repo = repo
        self.filename_checker = filename_checker or (lambda _: True)

    def format_download_url(self, tag: str, filename: str) -> str:
        return (
            f"https://github.com/{self.owner}/{self.repo}/"
            f"releases/download/{tag}/{filename}"
        )

    @override
    def find_from_local(self) -> Optional[Path]:
        fs = [
            x
            for x in config.nailong_model_dir.iterdir()
            if x.is_file() and self.filename_checker(x.name)
        ]
        if fs:
            fs.sort(key=lambda x: x.stat().st_mtime, reverse=True)
            return fs[0]
        return None

    @override
    def get_info(self) -> ModelInfo[None]:
        ret = self.github.rest.repos.get_latest_release(self.owner, self.repo)
        asset = next(x for x in ret.parsed_data.assets if self.filename_checker(x.name))
        return ModelInfo(
            self.format_download_url(ret.parsed_data.tag_name, asset.name),
            asset.name,
            asset.updated_at.strftime(TIME_FORMAT_TEMPLATE),
            None,
        )
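Note: ModelUpdater is the extension point of this new module — a subclass only has to provide find_from_local() and get_info(), and inherits local caching, version stamping, download, and validation from the base class. A hypothetical sketch (this subclass and its URL are illustrative, not part of the plugin):

from pathlib import Path
from typing import Optional

from nonebot_plugin_nailongremove.config import config
from nonebot_plugin_nailongremove.model.update import ModelInfo, ModelUpdater


class StaticUrlModelUpdater(ModelUpdater):
    """Fetch one fixed file from a plain HTTPS URL, versioned by a manual tag."""

    def __init__(self, url: str, filename: str, version: str) -> None:
        super().__init__()
        self.url = url
        self.filename = filename
        self.version = version

    def find_from_local(self) -> Optional[Path]:
        p = config.nailong_model_dir / self.filename
        return p if p.exists() else None

    def get_info(self) -> ModelInfo[None]:
        return ModelInfo(self.url, self.filename, self.version, None)


model_path = StaticUrlModelUpdater(
    "https://example.com/models/demo.onnx",  # placeholder URL
    "demo.onnx",
    "v1",
).get()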
@@ -1,183 +0,0 @@
import hashlib
from pathlib import Path
from typing import Any, Callable, Tuple, Union
from typing_extensions import TypeAlias

import torch
from githubkit import GitHub
from nonebot import logger

from .config import config

ModelValidator: TypeAlias = Callable[[Path], Any]
ModelVersionGetter: TypeAlias = Callable[
    [],
    Union[
        str,
        Tuple[str, ModelValidator],
    ],
]


def get_github():
    return GitHub(config.nailong_github_token, auto_retry=False)


def format_github_release_download_base_url(owner: str, name: str, tag: str):
    return f"https://github.com/{owner}/{name}/releases/download/{tag}"


def format_github_repo_download_base_url(
    owner: str,
    name: str,
    branch: str,
    folder: str,
):
    return f"https://github.com/{owner}/{name}/raw/refs/heads/{branch}/{folder}".removesuffix(
        "/",
    )


def make_sha1_validator(expected: str):
    def validator(file_path: Path):
        sha = hashlib.sha1()  # noqa: S324
        with file_path.open("rb") as f:
            while True:
                data = f.read(1048576)  # 1024 * 1024
                if not data:
                    break
                sha.update(data)
        if (got := sha.hexdigest()) != expected:
            raise ValueError(f"Invalid SHA1, expected {expected}, got {got}")

    return validator


def make_github_repo_sha_getter(
    owner: str,
    repo: str,
    branch: str,
    folder: str,
    filename: str,
):
    def getter() -> str:
        github = get_github()
        ret = github.rest.git.get_tree(owner, repo, f"{branch}:{folder}")
        sha = next(
            x.sha
            for x in ret.parsed_data.tree
            if x.path == filename and isinstance(x.sha, str)
        )
        return sha[:7]

    return getter


TIME_FORMAT_TEMPLATE = "%Y-%m-%d_%H-%M-%S"


def make_github_release_update_time_getter(
    owner: str,
    repo: str,
    tag: str,
    filename: str,
):
    def getter() -> str:
        github = get_github()
        ret = github.rest.repos.get_release_by_tag(owner, repo, tag)
        asset = next(x for x in ret.parsed_data.assets if x.name == filename)
        return asset.updated_at.strftime(TIME_FORMAT_TEMPLATE)

    return getter


def get_ver_filename(filename: str) -> str:
    return f"{filename}.ver.txt"


def ensure_model(
    model_base_url: str,
    model_filename: str,
    model_version_getter: ModelVersionGetter,
):
    model_path = config.nailong_model_dir / model_filename
    model_version_path = config.nailong_model_dir / get_ver_filename(model_filename)

    model_exists = model_path.exists()
    local_ver = (
        model_version_path.read_text(encoding="u8").strip()
        if model_exists and model_version_path.exists()
        else None
    )

    if model_exists and (not config.nailong_auto_update_model):
        logger.info(f"Using model {model_filename} (version {local_ver or 'Unknown'})")
        return model_path

    ver = None
    validator = None
    try:
        ver_ret = model_version_getter()
    except Exception as e:
        logger.error(
            f"Failed to get model version of {model_filename}: "
            f"{type(e).__name__}: {e}",
        )
        if model_exists:
            logger.opt(exception=e).debug("Stacktrace")
        else:
            raise
    else:
        if isinstance(ver_ret, tuple):
            ver, validator = ver_ret
        else:
            ver = ver_ret

    def download():
        if not config.nailong_model_dir.exists():
            config.nailong_model_dir.mkdir(parents=True)
        url = f"{model_base_url}/{model_filename}"
        torch.hub.download_url_to_file(url, str(model_path), progress=True)
        if validator is not None:
            try:
                validator(model_path)
            except Exception:
                model_path.unlink(missing_ok=True)
                raise

    if ver is None:
        logger.warning("Skip update.")
    elif local_ver != ver:
        local_ver_display = (
            f" from version {local_ver or 'Unknown'}" if model_exists else ""
        )
        logger.info(
            f"Updating model {model_filename}{local_ver_display} to version {ver}",
        )
        download()
        model_version_path.write_text(ver, encoding="u8")

    logger.info(f"Using model {model_filename} (version {local_ver or 'Unknown'})")
    return model_path


def ensure_model_from_github_release(owner: str, repo: str, tag: str, filename: str):
    return ensure_model(
        format_github_release_download_base_url(owner, repo, tag),
        filename,
        make_github_release_update_time_getter(owner, repo, tag, filename),
    )


def ensure_model_from_github_repo(
    owner: str,
    repo: str,
    branch: str,
    folder: str,
    filename: str,
):
    return ensure_model(
        format_github_repo_download_base_url(owner, repo, branch, folder),
        filename,
        make_github_repo_sha_getter(owner, repo, branch, folder, filename),
    )
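Note: the deleted helpers above include make_sha1_validator, which has no direct counterpart in the new update.py. If checksum verification is still wanted, the validate() hook of ModelUpdater is the natural place for it — validate_with_unlink() already deletes the file and clears the version stamp when validation raises. A hypothetical sketch (the subclass and expected hash are illustrative):

import hashlib
from pathlib import Path

from nonebot_plugin_nailongremove.model.update import GitHubRepoModelUpdater, ModelInfo


class Sha1CheckedRepoUpdater(GitHubRepoModelUpdater):
    def __init__(self, *args, expected_sha1: str, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.expected_sha1 = expected_sha1

    def validate(self, path: Path, info: ModelInfo) -> None:
        # Stream the file and compare its SHA-1 against the pinned digest.
        sha = hashlib.sha1()  # noqa: S324
        with path.open("rb") as f:
            while data := f.read(1024 * 1024):
                sha.update(data)
        if (got := sha.hexdigest()) != self.expected_sha1:
            raise ValueError(f"Invalid SHA1, expected {self.expected_sha1}, got {got}")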
pdm.lock (generated, 18 lines changed)
@@ -5,7 +5,7 @@
groups = ["default", "dev", "model1"]
strategy = ["inherit_metadata"]
lock_version = "4.5.0"
content_hash = "sha256:8d60e95796a305224579cea25022c1b1ea6532b2f573ef03af8f9cd57f089399"
content_hash = "sha256:d860059b4caf6cf9e7e4d517bee3edfb2ee328d9ac9060de7c25198f5112ef5a"

[[metadata.targets]]
requires_python = "~=3.9"
@@ -287,7 +287,7 @@ version = "0.4.6"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
summary = "Cross-platform colored terminal text."
groups = ["default", "dev"]
marker = "sys_platform == \"win32\""
marker = "platform_system == \"Windows\" or sys_platform == \"win32\""
files = [
    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@@ -2399,6 +2399,20 @@ files = [
    {file = "torchvision-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:47d0751aeaa7057ee6a5973d35e7acad3ad7c17b8e57a2c4304d13e001e330ae"},
]

[[package]]
name = "tqdm"
version = "4.66.6"
requires_python = ">=3.7"
summary = "Fast, Extensible Progress Meter"
groups = ["default"]
dependencies = [
    "colorama; platform_system == \"Windows\"",
]
files = [
    {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"},
    {file = "tqdm-4.66.6.tar.gz", hash = "sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"},
]

[[package]]
name = "triton"
version = "3.1.0"
@@ -21,6 +21,7 @@ dependencies = [
    "httpx>=0.27.2",
    "githubkit>=0.11.14",
    "yarl>=1.17.1",
    "tqdm>=4.66.6",
]
license = { text = "MIT" }
readme = "README.md"