分离magicocacroterline

This commit is contained in:
远野千束 2024-10-13 02:51:33 +08:00
parent a77f97fd4b
commit db385f597b
91 changed files with 3681 additions and 3117 deletions

View File

@ -1,8 +1,9 @@
name: Publish name: Publish
on: on:
release: push:
types: [published] tags:
- 'v*'
jobs: jobs:
pypi-publish: pypi-publish:

View File

@ -8,18 +8,21 @@ import threading
import time import time
from typing import Any, Optional from typing import Any, Optional
from liteyuki.bot.lifespan import (LIFESPAN_FUNC, Lifespan, PROCESS_LIFESPAN_FUNC) from liteyuki.bot.lifespan import LIFESPAN_FUNC, Lifespan, PROCESS_LIFESPAN_FUNC
from liteyuki.comm.channel import get_channel from liteyuki.comm.channel import get_channel
from liteyuki.core.manager import ProcessManager from liteyuki.core.manager import ProcessManager
from liteyuki.log import init_log, logger from liteyuki.log import init_log, logger
from liteyuki.plugin import load_plugin from liteyuki.plugin import load_plugin
from liteyuki.utils import IS_MAIN_PROCESS from liteyuki.utils import IS_MAIN_PROCESS
# new version
from liteyuki.core.manager import sub_process_manager
__all__ = [ __all__ = [
"LiteyukiBot", "LiteyukiBot",
"get_bot", "get_bot",
"get_config", "get_config",
"get_config_with_compat", "get_config_with_compat",
] ]
@ -60,6 +63,7 @@ class LiteyukiBot:
启动逻辑 启动逻辑
""" """
await self.lifespan.before_start() # 启动前钩子 await self.lifespan.before_start() # 启动前钩子
sub_process_manager.start_all()
await self.lifespan.after_start() # 启动后钩子 await self.lifespan.after_start() # 启动后钩子
await self.keep_alive() await self.keep_alive()
@ -108,7 +112,11 @@ class LiteyukiBot:
cmd = "nohup" cmd = "nohup"
self.process_manager.terminate_all() self.process_manager.terminate_all()
# 进程退出后重启 # 进程退出后重启
threading.Thread(target=os.system, args=(f"{cmd} {executable} {' '.join(args)}",), daemon=True).start() threading.Thread(
target=os.system,
args=(f"{cmd} {executable} {' '.join(args)}",),
daemon=True,
).start()
sys.exit(0) sys.exit(0)
self.call_restart_count += 1 self.call_restart_count += 1
@ -189,7 +197,9 @@ class LiteyukiBot:
""" """
return self.lifespan.on_before_process_shutdown(func) return self.lifespan.on_before_process_shutdown(func)
def on_before_process_restart(self, func: PROCESS_LIFESPAN_FUNC) -> PROCESS_LIFESPAN_FUNC: def on_before_process_restart(
self, func: PROCESS_LIFESPAN_FUNC
) -> PROCESS_LIFESPAN_FUNC:
""" """
注册进程重启前的函数为子进程重启时调用 注册进程重启前的函数为子进程重启时调用
Args: Args:
@ -211,7 +221,7 @@ class LiteyukiBot:
return self.lifespan.on_after_restart(func) return self.lifespan.on_after_restart(func)
_BOT_INSTANCE: LiteyukiBot _BOT_INSTANCE: LiteyukiBot | None = None
def get_bot() -> LiteyukiBot: def get_bot() -> LiteyukiBot:
@ -241,7 +251,9 @@ def get_config(key: str, default: Any = None) -> Any:
return get_bot().config.get(key, default) return get_bot().config.get(key, default)
def get_config_with_compat(key: str, compat_keys: tuple[str], default: Any = None) -> Any: def get_config_with_compat(
key: str, compat_keys: tuple[str], default: Any = None
) -> Any:
""" """
获取配置兼容旧版本 获取配置兼容旧版本
Args: Args:
@ -256,14 +268,18 @@ def get_config_with_compat(key: str, compat_keys: tuple[str], default: Any = Non
return get_bot().config[key] return get_bot().config[key]
for compat_key in compat_keys: for compat_key in compat_keys:
if compat_key in get_bot().config: if compat_key in get_bot().config:
logger.warning(f"Config key \"{compat_key}\" will be deprecated, use \"{key}\" instead.") logger.warning(
f'Config key "{compat_key}" will be deprecated, use "{key}" instead.'
)
return get_bot().config[compat_key] return get_bot().config[compat_key]
return default return default
def print_logo(): def print_logo():
"""@litedoc-hide""" """@litedoc-hide"""
print("\033[34m" + r""" print(
"\033[34m"
+ r"""
__ ______ ________ ________ __ __ __ __ __ __ ______ __ ______ ________ ________ __ __ __ __ __ __ ______
/ | / |/ |/ |/ \ / |/ | / |/ | / |/ | / | / |/ |/ |/ \ / |/ | / |/ | / |/ |
$$ | $$$$$$/ $$$$$$$$/ $$$$$$$$/ $$ \ /$$/ $$ | $$ |$$ | /$$/ $$$$$$/ $$ | $$$$$$/ $$$$$$$$/ $$$$$$$$/ $$ \ /$$/ $$ | $$ |$$ | /$$/ $$$$$$/
@ -273,4 +289,6 @@ def print_logo():
$$ |_____ _$$ |_ $$ | $$ |_____ $$ | $$ \__$$ |$$ |$$ \ _$$ |_ $$ |_____ _$$ |_ $$ | $$ |_____ $$ | $$ \__$$ |$$ |$$ \ _$$ |_
$$ |/ $$ | $$ | $$ | $$ | $$ $$/ $$ | $$ |/ $$ | $$ |/ $$ | $$ | $$ | $$ | $$ $$/ $$ | $$ |/ $$ |
$$$$$$$$/ $$$$$$/ $$/ $$$$$$$$/ $$/ $$$$$$/ $$/ $$/ $$$$$$/ $$$$$$$$/ $$$$$$/ $$/ $$$$$$$$/ $$/ $$$$$$/ $$/ $$/ $$$$$$/
""" + "\033[0m") """
+ "\033[0m"
)

View File

@ -4,20 +4,31 @@
""" """
import asyncio import asyncio
from multiprocessing import Pipe from multiprocessing import Pipe
from typing import Any, Callable, Coroutine, Generic, Optional, TypeAlias, TypeVar, get_args from typing import (
Any,
Callable,
Coroutine,
Generic,
Optional,
TypeAlias,
TypeVar,
get_args,
)
from liteyuki.log import logger from liteyuki.log import logger
from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable
T = TypeVar("T") T = TypeVar("T")
SYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Any] # 同步接收函数 SYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Any] # 同步接收函数
ASYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, Any]] # 异步接收函数 ASYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[
ON_RECEIVE_FUNC: TypeAlias = SYNC_ON_RECEIVE_FUNC | ASYNC_ON_RECEIVE_FUNC # 接收函数 [T], Coroutine[Any, Any, Any]
] # 异步接收函数
ON_RECEIVE_FUNC: TypeAlias = SYNC_ON_RECEIVE_FUNC | ASYNC_ON_RECEIVE_FUNC # 接收函数
SYNC_FILTER_FUNC: TypeAlias = Callable[[T], bool] # 同步过滤函数 SYNC_FILTER_FUNC: TypeAlias = Callable[[T], bool] # 同步过滤函数
ASYNC_FILTER_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, bool]] # 异步过滤函数 ASYNC_FILTER_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, bool]] # 异步过滤函数
FILTER_FUNC: TypeAlias = SYNC_FILTER_FUNC | ASYNC_FILTER_FUNC # 过滤函数 FILTER_FUNC: TypeAlias = SYNC_FILTER_FUNC | ASYNC_FILTER_FUNC # 过滤函数
_func_id: int = 0 _func_id: int = 0
_channel: dict[str, "Channel"] = {} _channel: dict[str, "Channel"] = {}
@ -39,7 +50,9 @@ class Channel(Generic[T]):
""" """
self.conn_send, self.conn_recv = Pipe() self.conn_send, self.conn_recv = Pipe()
self._conn_send_inner, self._conn_recv_inner = Pipe() # 内部通道,用于子进程通信 self._conn_send_inner, self._conn_recv_inner = (
Pipe()
) # 内部通道,用于子进程通信
self._closed = False self._closed = False
self._on_main_receive_func_ids: list[int] = [] self._on_main_receive_func_ids: list[int] = []
self._on_sub_receive_func_ids: list[int] = [] self._on_sub_receive_func_ids: list[int] = []
@ -64,7 +77,9 @@ class Channel(Generic[T]):
_channel[name] = self _channel[name] = self
logger.debug(f"Channel {name} initialized in main process") logger.debug(f"Channel {name} initialized in main process")
else: else:
logger.debug(f"Channel {name} initialized in sub process, should manually set in main process") logger.debug(
f"Channel {name} initialized in sub process, should manually set in main process"
)
def _get_generic_type(self) -> Optional[type]: def _get_generic_type(self) -> Optional[type]:
""" """
@ -72,7 +87,7 @@ class Channel(Generic[T]):
Returns: Returns:
Optional[type]: 泛型类型 Optional[type]: 泛型类型
""" """
if hasattr(self, '__orig_class__'): if hasattr(self, "__orig_class__"):
return get_args(self.__orig_class__)[0] return get_args(self.__orig_class__)[0]
return None return None
@ -98,7 +113,10 @@ class Channel(Generic[T]):
elif isinstance(structure, dict): elif isinstance(structure, dict):
if not isinstance(data, dict): if not isinstance(data, dict):
return False return False
return all(k in data and self._validate_structure(data[k], structure[k]) for k in structure) return all(
k in data and self._validate_structure(data[k], structure[k])
for k in structure
)
return False return False
def __str__(self): def __str__(self):
@ -113,10 +131,12 @@ class Channel(Generic[T]):
if self.type_check: if self.type_check:
_type = self._get_generic_type() _type = self._get_generic_type()
if _type is not None and not self._validate_structure(data, _type): if _type is not None and not self._validate_structure(data, _type):
raise TypeError(f"Data must be an instance of {_type}, {type(data)} found") raise TypeError(
f"Data must be an instance of {_type}, {type(data)} found"
)
if self._closed: if self._closed:
raise RuntimeError("Cannot send to a closed channel_") raise RuntimeError("Cannot send to a closed channel")
self.conn_send.send(data) self.conn_send.send(data)
def receive(self) -> T: def receive(self) -> T:
@ -126,7 +146,7 @@ class Channel(Generic[T]):
T: 数据 T: 数据
""" """
if self._closed: if self._closed:
raise RuntimeError("Cannot receive from a closed channel_") raise RuntimeError("Cannot receive from a closed channel")
while True: while True:
data = self.conn_recv.recv() data = self.conn_recv.recv()
@ -142,7 +162,9 @@ class Channel(Generic[T]):
data = await loop.run_in_executor(None, self.receive) data = await loop.run_in_executor(None, self.receive)
return data return data
def on_receive(self, filter_func: Optional[FILTER_FUNC] = None) -> Callable[[Callable[[T], Any]], Callable[[T], Any]]: def on_receive(
self, filter_func: Optional[FILTER_FUNC] = None
) -> Callable[[Callable[[T], Any]], Callable[[T], Any]]:
""" """
接收数据并执行函数 接收数据并执行函数
Args: Args:
@ -187,37 +209,52 @@ class Channel(Generic[T]):
data: 数据 data: 数据
""" """
if IS_MAIN_PROCESS: if IS_MAIN_PROCESS:
[asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_main_receive_func_ids] [
asyncio.create_task(_callback_funcs[func_id](data))
for func_id in self._on_main_receive_func_ids
]
else: else:
[asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_sub_receive_func_ids] [
asyncio.create_task(_callback_funcs[func_id](data))
for func_id in self._on_sub_receive_func_ids
]
"""子进程可用的主动和被动通道""" """子进程可用的主动和被动通道"""
active_channel: Channel = Channel(name="active_channel") # 主动通道 active_channel: Channel = Channel(name="active_channel") # 主动通道
passive_channel: Channel = Channel(name="passive_channel") # 被动通道 passive_channel: Channel = Channel(name="passive_channel") # 被动通道
publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel(name="publish_channel") # 发布通道 publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel(
name="publish_channel"
) # 发布通道
"""通道传递通道,主进程创建单例,子进程初始化时实例化""" """通道传递通道,主进程创建单例,子进程初始化时实例化"""
channel_deliver_active_channel: Channel[Channel[Any]] # 主动通道传递通道 channel_deliver_active_channel: Channel[Channel[Any]] # 主动通道传递通道
channel_deliver_passive_channel: Channel[tuple[str, dict[str, Any]]] # 被动通道传递通道 channel_deliver_passive_channel: Channel[tuple[str, dict[str, Any]]] # 被动通道传递通道
if IS_MAIN_PROCESS: if IS_MAIN_PROCESS:
channel_deliver_active_channel = Channel(name="channel_deliver_active_channel") # 主动通道传递通道 channel_deliver_active_channel = Channel(
channel_deliver_passive_channel = Channel(name="channel_deliver_passive_channel") # 被动通道传递通道 name="channel_deliver_active_channel"
) # 主动通道传递通道
channel_deliver_passive_channel = Channel(
name="channel_deliver_passive_channel"
) # 被动通道传递通道
@channel_deliver_passive_channel.on_receive(
@channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "set_channel") filter_func=lambda data: data[0] == "set_channel"
)
def on_set_channel(data: tuple[str, dict[str, Any]]): def on_set_channel(data: tuple[str, dict[str, Any]]):
name, channel = data[1]["name"], data[1]["channel_"] name, channel = data[1]["name"], data[1]["channel_"]
set_channel(name, channel) set_channel(name, channel)
@channel_deliver_passive_channel.on_receive(
@channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "get_channel") filter_func=lambda data: data[0] == "get_channel"
)
def on_get_channel(data: tuple[str, dict[str, Any]]): def on_get_channel(data: tuple[str, dict[str, Any]]):
name, recv_chan = data[1]["name"], data[1]["recv_chan"] name, recv_chan = data[1]["name"], data[1]["recv_chan"]
recv_chan.send(get_channel(name)) recv_chan.send(get_channel(name))
@channel_deliver_passive_channel.on_receive(
@channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "get_channels") filter_func=lambda data: data[0] == "get_channels"
)
def on_get_channels(data: tuple[str, dict[str, Any]]): def on_get_channels(data: tuple[str, dict[str, Any]]):
recv_chan = data[1]["recv_chan"] recv_chan = data[1]["recv_chan"]
recv_chan.send(get_channels()) recv_chan.send(get_channels())
@ -231,7 +268,9 @@ def set_channel(name: str, channel: "Channel"):
channel ([`Channel`](#class-channel-generic-t)): 通道实例 channel ([`Channel`](#class-channel-generic-t)): 通道实例
""" """
if not isinstance(channel, Channel): if not isinstance(channel, Channel):
raise TypeError(f"channel_ must be an instance of Channel, {type(channel)} found") raise TypeError(
f"channel_ must be an instance of Channel, {type(channel)} found"
)
if IS_MAIN_PROCESS: if IS_MAIN_PROCESS:
if name in _channel: if name in _channel:
@ -241,10 +280,11 @@ def set_channel(name: str, channel: "Channel"):
# 请求主进程设置通道 # 请求主进程设置通道
channel_deliver_passive_channel.send( channel_deliver_passive_channel.send(
( (
"set_channel", { "set_channel",
"name" : name, {
"channel_": channel, "name": name,
} "channel_": channel,
},
) )
) )
@ -273,13 +313,7 @@ def get_channel(name: str) -> "Channel":
else: else:
recv_chan = Channel[Channel[Any]]("recv_chan") recv_chan = Channel[Channel[Any]]("recv_chan")
channel_deliver_passive_channel.send( channel_deliver_passive_channel.send(
( ("get_channel", {"name": name, "recv_chan": recv_chan})
"get_channel",
{
"name" : name,
"recv_chan": recv_chan
}
)
) )
return recv_chan.receive() return recv_chan.receive()
@ -294,12 +328,5 @@ def get_channels() -> dict[str, "Channel"]:
return _channel return _channel
else: else:
recv_chan = Channel[dict[str, Channel[Any]]]("recv_chan") recv_chan = Channel[dict[str, Channel[Any]]]("recv_chan")
channel_deliver_passive_channel.send( channel_deliver_passive_channel.send(("get_channels", {"recv_chan": recv_chan}))
(
"get_channels",
{
"recv_chan": recv_chan
}
)
)
return recv_chan.receive() return recv_chan.receive()

View File

@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
"""
本模块用于实现RPC(基于IPC)通信
"""
from typing import TypeAlias, Callable, Any
from liteyuki.comm.channel import Channel
ON_CALLING_FUNC: TypeAlias = Callable[[tuple, dict], Any]
class RPC:
"""
RPC类
"""
def __init__(self, on_calling: ON_CALLING_FUNC) -> None:
self.on_calling = on_calling
def call(self, args: tuple, kwargs: dict) -> Any:
"""
调用
"""
# 获取self.calling函数名
return self.on_calling(args, kwargs)

View File

@ -14,6 +14,9 @@ import threading
from multiprocessing import Process from multiprocessing import Process
from typing import Any, Callable, TYPE_CHECKING, TypeAlias from typing import Any, Callable, TYPE_CHECKING, TypeAlias
from croterline.context import Context
from croterline.process import SubProcess, ProcessFuncType
from liteyuki.log import logger from liteyuki.log import logger
from liteyuki.utils import IS_MAIN_PROCESS from liteyuki.utils import IS_MAIN_PROCESS
@ -26,7 +29,10 @@ from liteyuki.comm import Channel
if IS_MAIN_PROCESS: if IS_MAIN_PROCESS:
from liteyuki.comm.channel import get_channel, publish_channel, get_channels from liteyuki.comm.channel import get_channel, publish_channel, get_channels
from liteyuki.comm.storage import shared_memory from liteyuki.comm.storage import shared_memory
from liteyuki.comm.channel import channel_deliver_active_channel, channel_deliver_passive_channel from liteyuki.comm.channel import (
channel_deliver_active_channel,
channel_deliver_passive_channel,
)
else: else:
from liteyuki.comm import channel from liteyuki.comm import channel
from liteyuki.comm import storage from liteyuki.comm import storage
@ -34,20 +40,18 @@ else:
TARGET_FUNC: TypeAlias = Callable[..., Any] TARGET_FUNC: TypeAlias = Callable[..., Any]
TIMEOUT = 10 TIMEOUT = 10
__all__ = [ __all__ = ["ProcessManager", "sub_process_manager"]
"ProcessManager"
]
multiprocessing.set_start_method("spawn", force=True) multiprocessing.set_start_method("spawn", force=True)
class ChannelDeliver: class ChannelDeliver:
def __init__( def __init__(
self, self,
active: Channel[Any], active: Channel[Any],
passive: Channel[Any], passive: Channel[Any],
channel_deliver_active: Channel[Channel[Any]], channel_deliver_active: Channel[Channel[Any]],
channel_deliver_passive: Channel[tuple[str, dict]], channel_deliver_passive: Channel[tuple[str, dict]],
publish: Channel[tuple[str, Any]], publish: Channel[tuple[str, Any]],
): ):
self.active = active self.active = active
self.passive = passive self.passive = passive
@ -57,7 +61,9 @@ class ChannelDeliver:
# 函数处理一些跨进程通道的 # 函数处理一些跨进程通道的
def _delivery_channel_wrapper(func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyValueStore", *args, **kwargs): def _delivery_channel_wrapper(
func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyValueStore", *args, **kwargs
):
""" """
子进程入口函数 子进程入口函数
处理一些操作 处理一些操作
@ -68,8 +74,12 @@ def _delivery_channel_wrapper(func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyVal
channel.active_channel = cd.active # 子进程主动通道 channel.active_channel = cd.active # 子进程主动通道
channel.passive_channel = cd.passive # 子进程被动通道 channel.passive_channel = cd.passive # 子进程被动通道
channel.channel_deliver_active_channel = cd.channel_deliver_active # 子进程通道传递主动通道 channel.channel_deliver_active_channel = (
channel.channel_deliver_passive_channel = cd.channel_deliver_passive # 子进程通道传递被动通道 cd.channel_deliver_active
) # 子进程通道传递主动通道
channel.channel_deliver_passive_channel = (
cd.channel_deliver_passive
) # 子进程通道传递被动通道
channel.publish_channel = cd.publish # 子进程发布通道 channel.publish_channel = cd.publish # 子进程发布通道
# 给子进程创建共享内存实例 # 给子进程创建共享内存实例
@ -102,8 +112,12 @@ class ProcessManager:
chan_active = get_channel(f"{name}-active") chan_active = get_channel(f"{name}-active")
def _start_process(): def _start_process():
process = Process(target=self.targets[name][0], args=self.targets[name][1], process = Process(
kwargs=self.targets[name][2], daemon=True) target=self.targets[name][0],
args=self.targets[name][1],
kwargs=self.targets[name][2],
daemon=True,
)
self.processes[name] = process self.processes[name] = process
process.start() process.start()
@ -133,7 +147,9 @@ class ProcessManager:
for name in self.targets: for name in self.targets:
logger.debug(f"Starting process {name}") logger.debug(f"Starting process {name}")
threading.Thread(target=self._run_process, args=(name, ), daemon=True).start() threading.Thread(
target=self._run_process, args=(name,), daemon=True
).start()
def add_target(self, name: str, target: TARGET_FUNC, args: tuple = (), kwargs=None): def add_target(self, name: str, target: TARGET_FUNC, args: tuple = (), kwargs=None):
""" """
@ -154,10 +170,14 @@ class ProcessManager:
passive=chan_passive, passive=chan_passive,
channel_deliver_active=channel_deliver_active_channel, channel_deliver_active=channel_deliver_active_channel,
channel_deliver_passive=channel_deliver_passive_channel, channel_deliver_passive=channel_deliver_passive_channel,
publish=publish_channel publish=publish_channel,
) )
self.targets[name] = (_delivery_channel_wrapper, (target, channel_deliver, shared_memory, *args), kwargs) self.targets[name] = (
_delivery_channel_wrapper,
(target, channel_deliver, shared_memory, *args),
kwargs,
)
# 主进程通道 # 主进程通道
def join_all(self): def join_all(self):
@ -199,3 +219,54 @@ class ProcessManager:
if name not in self.targets: if name not in self.targets:
logger.warning(f"Process {name} not found.") logger.warning(f"Process {name} not found.")
return self.processes[name].is_alive() return self.processes[name].is_alive()
# new version
class _SubProcessManager:
def __init__(self):
self.processes: dict[str, SubProcess] = {}
def new_process(
self, name: str, *args, **kwargs
) -> Callable[[ProcessFuncType], None]:
def decorator(func: ProcessFuncType):
self.processes[name] = SubProcess(name, func, *args, **kwargs)
return decorator
def add(self, name: str, func: ProcessFuncType, *args, **kwargs):
"""
添加子进程
Args:
func: 子进程函数
name: 子进程名称
args: 子进程函数参数
kwargs: 子进程函数关键字参数
Returns:
"""
self.processes[name] = SubProcess(name, func, *args, **kwargs)
def start(self, name: str):
"""
启动指定子进程
Args:
name: 子进程名称
Returns:
"""
if name not in self.processes:
raise KeyError(f"Process {name} not found.")
self.processes[name].start()
def start_all(self):
"""
启动所有子进程
"""
for name, process in self.processes.items():
process.start()
logger.debug(f"Starting process {name}")
sub_process_manager = _SubProcessManager()

View File

@ -60,7 +60,6 @@ def load_plugin(module_path: str | Path) -> Optional[Plugin]:
f"{metadata.name}({module.__name__.split('.')[-1]})", metadata.type f"{metadata.name}({module.__name__.split('.')[-1]})", metadata.type
) )
else: else:
logger.opt(colors=True).warning( logger.opt(colors=True).warning(
f'The metadata of Liteyuki plugin "{module.__name__}" is not specified, use empty.' f'The metadata of Liteyuki plugin "{module.__name__}" is not specified, use empty.'
) )

510
pdm.lock Normal file
View File

@ -0,0 +1,510 @@
# This file is @generated by PDM.
# It is not intended for manual editing.
[metadata]
groups = ["default", "dev"]
strategy = ["inherit_metadata"]
lock_version = "4.5.0"
content_hash = "sha256:ca1b7f108fb7c5bc51977cf165511cbc7a95f64bc4effd6b6597f60bf893ba77"
[[metadata.targets]]
requires_python = ">=3.10"
[[package]]
name = "annotated-types"
version = "0.7.0"
requires_python = ">=3.8"
summary = "Reusable constraint types to use with typing.Annotated"
groups = ["default"]
dependencies = [
"typing-extensions>=4.0.0; python_version < \"3.9\"",
]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
[[package]]
name = "black"
version = "24.10.0"
requires_python = ">=3.9"
summary = "The uncompromising code formatter."
groups = ["dev"]
dependencies = [
"click>=8.0.0",
"mypy-extensions>=0.4.3",
"packaging>=22.0",
"pathspec>=0.9.0",
"platformdirs>=2",
"tomli>=1.1.0; python_version < \"3.11\"",
"typing-extensions>=4.0.1; python_version < \"3.11\"",
]
files = [
{file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
{file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
{file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"},
{file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"},
{file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"},
{file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"},
{file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"},
{file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"},
{file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"},
{file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"},
{file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"},
{file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"},
{file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"},
{file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"},
{file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"},
{file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"},
{file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"},
{file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"},
]
[[package]]
name = "click"
version = "8.1.7"
requires_python = ">=3.7"
summary = "Composable command line interface toolkit"
groups = ["dev"]
dependencies = [
"colorama; platform_system == \"Windows\"",
"importlib-metadata; python_version < \"3.8\"",
]
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
[[package]]
name = "colorama"
version = "0.4.6"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
summary = "Cross-platform colored terminal text."
groups = ["default", "dev"]
marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "croterline"
version = "1.0.3"
requires_python = ">=3.10"
summary = "Default template for PDM package"
groups = ["default"]
dependencies = [
"magicoca>=1.0.1",
]
files = [
{file = "croterline-1.0.3-py3-none-any.whl", hash = "sha256:e934041248bba97382cc522c658d6c4f507dbcfe751e90a1d4cb3076b530e99b"},
{file = "croterline-1.0.3.tar.gz", hash = "sha256:eb3874a96ed06d98fe210731ad9352a854df81218fb2c25e707e2b641b6daffb"},
]
[[package]]
name = "exceptiongroup"
version = "1.2.2"
requires_python = ">=3.7"
summary = "Backport of PEP 654 (exception groups)"
groups = ["dev"]
marker = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
]
[[package]]
name = "iniconfig"
version = "2.0.0"
requires_python = ">=3.7"
summary = "brain-dead simple config-ini parsing"
groups = ["dev"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "loguru"
version = "0.7.2"
requires_python = ">=3.5"
summary = "Python logging made (stupidly) simple"
groups = ["default"]
dependencies = [
"aiocontextvars>=0.2.0; python_version < \"3.7\"",
"colorama>=0.3.4; sys_platform == \"win32\"",
"win32-setctime>=1.0.0; sys_platform == \"win32\"",
]
files = [
{file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
{file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
]
[[package]]
name = "magicoca"
version = "1.0.1"
requires_python = ">=3.10"
summary = "A communication library for Python"
groups = ["default"]
files = [
{file = "magicoca-1.0.1-py3-none-any.whl", hash = "sha256:69e04be77f9c02d3d0730dc4e739246f4bdefee8b78631040b464cd98cdde51c"},
{file = "magicoca-1.0.1.tar.gz", hash = "sha256:0dbc9a35609db92ec79076f7126566c1e71bd4b853909ecbad9221dcc7fd6f31"},
]
[[package]]
name = "mypy"
version = "1.11.2"
requires_python = ">=3.8"
summary = "Optional static typing for Python"
groups = ["dev"]
dependencies = [
"mypy-extensions>=1.0.0",
"tomli>=1.1.0; python_version < \"3.11\"",
"typing-extensions>=4.6.0",
]
files = [
{file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"},
{file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"},
{file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"},
{file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"},
{file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"},
{file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"},
{file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"},
{file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"},
{file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"},
{file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"},
{file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"},
{file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"},
{file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"},
{file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"},
{file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"},
{file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"},
{file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"},
]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
requires_python = ">=3.5"
summary = "Type system extensions for programs checked with the mypy type checker."
groups = ["dev"]
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "packaging"
version = "24.1"
requires_python = ">=3.8"
summary = "Core utilities for Python packages"
groups = ["dev"]
files = [
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
name = "pathspec"
version = "0.12.1"
requires_python = ">=3.8"
summary = "Utility library for gitignore style pattern matching of file paths."
groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
name = "pdm-backend"
version = "2.4.2"
requires_python = ">=3.8"
summary = "The build backend used by PDM that supports latest packaging standards"
groups = ["default"]
dependencies = [
"importlib-metadata>=3.6; python_version < \"3.10\"",
]
files = [
{file = "pdm_backend-2.4.2-py3-none-any.whl", hash = "sha256:8537a3273b19d6448eb07a4a1a92dedc0b60935344a037729ada7be33b5f71ad"},
{file = "pdm_backend-2.4.2.tar.gz", hash = "sha256:1f833e527ae172f34b4b84e2fcf1f65859a2a5ca746e496d8313b3ea6539969f"},
]
[[package]]
name = "platformdirs"
version = "4.3.6"
requires_python = ">=3.8"
summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
groups = ["dev"]
files = [
{file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
{file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
]
[[package]]
name = "pluggy"
version = "1.5.0"
requires_python = ">=3.8"
summary = "plugin and hook calling mechanisms for python"
groups = ["dev"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[[package]]
name = "pydantic"
version = "2.9.2"
requires_python = ">=3.8"
summary = "Data validation using Python type hints"
groups = ["default"]
dependencies = [
"annotated-types>=0.6.0",
"pydantic-core==2.23.4",
"typing-extensions>=4.12.2; python_version >= \"3.13\"",
"typing-extensions>=4.6.1; python_version < \"3.13\"",
]
files = [
{file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"},
{file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"},
]
[[package]]
name = "pydantic-core"
version = "2.23.4"
requires_python = ">=3.8"
summary = "Core functionality for Pydantic validation and serialization"
groups = ["default"]
dependencies = [
"typing-extensions!=4.7.0,>=4.6.0",
]
files = [
{file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"},
{file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"},
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"},
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"},
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"},
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"},
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"},
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"},
{file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"},
{file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"},
{file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"},
{file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"},
{file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"},
{file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"},
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"},
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"},
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"},
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"},
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"},
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"},
{file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"},
{file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"},
{file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"},
{file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"},
{file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"},
{file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"},
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"},
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"},
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"},
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"},
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"},
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"},
{file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"},
{file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"},
{file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"},
{file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"},
{file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"},
{file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"},
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"},
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"},
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"},
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"},
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"},
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"},
{file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"},
{file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"},
{file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"},
{file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"},
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"},
{file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"},
]
[[package]]
name = "pytest"
version = "8.3.3"
requires_python = ">=3.8"
summary = "pytest: simple powerful testing with Python"
groups = ["dev"]
dependencies = [
"colorama; sys_platform == \"win32\"",
"exceptiongroup>=1.0.0rc8; python_version < \"3.11\"",
"iniconfig",
"packaging",
"pluggy<2,>=1.5",
"tomli>=1; python_version < \"3.11\"",
]
files = [
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
]
[[package]]
name = "pyyaml"
version = "6.0.2"
requires_python = ">=3.8"
summary = "YAML parser and emitter for Python"
groups = ["default"]
files = [
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
{file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
{file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
{file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
{file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
{file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
{file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
{file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
{file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
[[package]]
name = "toml"
version = "0.10.2"
requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
summary = "Python Library for Tom's Obvious, Minimal Language"
groups = ["default"]
files = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
[[package]]
name = "tomli"
version = "2.0.2"
requires_python = ">=3.8"
summary = "A lil' TOML parser"
groups = ["dev"]
marker = "python_version < \"3.11\""
files = [
{file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
{file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
]
[[package]]
name = "typing-extensions"
version = "4.12.2"
requires_python = ">=3.8"
summary = "Backported and Experimental Type Hints for Python 3.8+"
groups = ["default", "dev"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "uv"
version = "0.4.20"
requires_python = ">=3.8"
summary = "An extremely fast Python package and project manager, written in Rust."
groups = ["dev"]
files = [
{file = "uv-0.4.20-py3-none-linux_armv6l.whl", hash = "sha256:d0566f3ce596b0192099f7a01be08e1f37061d7399e0128804794cf83cdf2806"},
{file = "uv-0.4.20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1f20251b5a6a1cc92d844153b128b346bd0be8178beb4945df63d1a76a905176"},
{file = "uv-0.4.20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d37f02ae48540104d9c13d2dfe27bf84b246d5945b55d91568404da08e2a3bd8"},
{file = "uv-0.4.20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:555f0275c3db5b1cd13f6a6825b0b0f23e116a58a46da65f55d4f07915b36b16"},
{file = "uv-0.4.20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6faba47d13c1b916bfe9a1828a792ba21558871b4b81dbb79c157077f558fb3"},
{file = "uv-0.4.20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:092d4d3cee4a9680832c16d5c1a5e816b2d07a31328580f04e4ddf437821b1f3"},
{file = "uv-0.4.20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5d62655450d173a4dbe76b70b9af81ffa501501d97224f311f126b30924b42f7"},
{file = "uv-0.4.20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:865c5fbc2ebe73b4f4b71cbcc1b1bae90a335b15f6eaa9fa6495f77a6e86455e"},
{file = "uv-0.4.20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a65eaec88b084094f5b08c2ad73f0ae972f7d6afd0d3ee1d0eb29a76c010a39b"},
{file = "uv-0.4.20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e3492d5f1613e88201b6f68a2e5fba48b0bdbe0f11179df9b222e9dd8d89d3"},
{file = "uv-0.4.20-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:8ec4a7d0ab131ea749702d4885ff0f6734e1aca1dc26ebbc1c7c67969ba3c0fc"},
{file = "uv-0.4.20-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:588aedc47fe02f8cf0dfe0dec3fd5e1f3a707fdf674964b3d31f0523351db9d2"},
{file = "uv-0.4.20-py3-none-musllinux_1_1_i686.whl", hash = "sha256:309539e9b29f3fbbedb3835297a324a9206b42005e15b0af3fa73343ab966349"},
{file = "uv-0.4.20-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad94fb135bec5c061ba21b1f081f349c3de2b0f8660e168e5afc829d3069e6d"},
{file = "uv-0.4.20-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:74f78748e72893a674351ca9d708003629ddc1a00bc51100c901b5d47db73e43"},
{file = "uv-0.4.20-py3-none-win32.whl", hash = "sha256:dbf454b6f56f9181886426c7aed7a8dfc8258f80082365fe99b2044ff92261ba"},
{file = "uv-0.4.20-py3-none-win_amd64.whl", hash = "sha256:653bfec188d199384451804a6c055fb1d28662adfee7697fe7108c6fb78924ba"},
{file = "uv-0.4.20.tar.gz", hash = "sha256:b4c8a2027b1f19f8b8949132e728a750e4f9b4bb0ec02544d9b21df3f525ab1a"},
]
[[package]]
name = "watchdog"
version = "5.0.3"
requires_python = ">=3.9"
summary = "Filesystem events monitoring"
groups = ["default"]
files = [
{file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:85527b882f3facda0579bce9d743ff7f10c3e1e0db0a0d0e28170a7d0e5ce2ea"},
{file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53adf73dcdc0ef04f7735066b4a57a4cd3e49ef135daae41d77395f0b5b692cb"},
{file = "watchdog-5.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e25adddab85f674acac303cf1f5835951345a56c5f7f582987d266679979c75b"},
{file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f01f4a3565a387080dc49bdd1fefe4ecc77f894991b88ef927edbfa45eb10818"},
{file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91b522adc25614cdeaf91f7897800b82c13b4b8ac68a42ca959f992f6990c490"},
{file = "watchdog-5.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d52db5beb5e476e6853da2e2d24dbbbed6797b449c8bf7ea118a4ee0d2c9040e"},
{file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8"},
{file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926"},
{file = "watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e"},
{file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7"},
{file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906"},
{file = "watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1"},
{file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:90a67d7857adb1d985aca232cc9905dd5bc4803ed85cfcdcfcf707e52049eda7"},
{file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:720ef9d3a4f9ca575a780af283c8fd3a0674b307651c1976714745090da5a9e8"},
{file = "watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91"},
{file = "watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c"},
{file = "watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = "sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c"},
{file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221"},
{file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05"},
{file = "watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97"},
{file = "watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7"},
{file = "watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49"},
{file = "watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9"},
{file = "watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45"},
{file = "watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176"},
]
[[package]]
name = "win32-setctime"
version = "1.1.0"
requires_python = ">=3.5"
summary = "A small Python utility to set file creation time on Windows"
groups = ["default"]
marker = "sys_platform == \"win32\""
files = [
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
]

View File

@ -10,17 +10,18 @@ readme = "README.md"
requires-python = ">=3.10" requires-python = ">=3.10"
authors = [ authors = [
{ name = "snowykami", email = "snowykami@outlook.com" }, { name = "snowykami", email = "snowykami@outlook.com" },
{ name = "LiteyukiStudio", email = "studio@liteyuki.icu" },
] ]
license = { text = "MIT&LSO" } license = { text = "MIT&LSO" }
dependencies = [ dependencies = [
"loguru~=0.7.2", "loguru~=0.7.2",
"pydantic==2.8.2", "pydantic>=2.8.2",
"PyYAML==6.0.2", "PyYAML>=6.0.2",
"toml==0.10.2", "toml>=0.10.2",
"watchdog==4.0.1", "watchdog>=4.0.1",
"pdm-backend==2.3.3" "pdm-backend>=2.3.3",
"magicoca>=1.0.1",
"croterline>=1.0.3"
] ]
[project.urls] [project.urls]
@ -38,5 +39,14 @@ includes = ["liteyuki/", "LICENSE", "README.md"]
excludes = ["tests/", "docs/", "src/"] excludes = ["tests/", "docs/", "src/"]
[tool.pdm.version] [tool.pdm.version]
source = "file" source = "scm"
path = "liteyuki/__init__.py" tag_filter = "v*"
tag_regex = '^v(?:\D*)?(?P<version>([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|c|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$)$'
[tool.pdm.dev-dependencies]
dev = [
"pytest>=8.3.3",
"black>=24.10.0",
"uv>=0.4.20",
"mypy>=1.11.2",
]

View File

@ -1,6 +1,7 @@
aiohttp>=3.9.3 aiohttp>=3.9.3
aiofiles>=23.2.1 aiofiles>=23.2.1
colored>=2.2.4 colored>=2.2.4
croterline>=1.0.3
GitPython>=3.1.43 GitPython>=3.1.43
httpx>=0.27.0 httpx>=0.27.0
nonebot-plugin-htmlrender>=0.1.0 nonebot-plugin-htmlrender>=0.1.0
@ -25,4 +26,9 @@ toml>=0.10.2
importlib_metadata>=7.0.2 importlib_metadata>=7.0.2
watchdog>=4.0.0 watchdog>=4.0.0
jieba>=0.42.1 jieba>=0.42.1
python-dotenv>=1.0.1 python-dotenv>=1.0.1
loguru~=0.7.2
pydantic~=2.9.2
pip~=23.2.1
fastapi~=0.115.0
magicoca~=1.0.1

View File

@ -1,53 +0,0 @@
# -*- coding: utf-8 -*-
"""
Copyright (C) 2020-2024 LiteyukiStudio. All Rights Reserved
@Time : 2024/8/11 下午5:24
@Author : snowykami
@Email : snowykami@outlook.com
@File : __init__.py.py
@Software: PyCharm
"""
import nonebot
from liteyuki.utils import IS_MAIN_PROCESS
from liteyuki.plugin import PluginMetadata, PluginType
from .nb_utils import adapter_manager, driver_manager # type: ignore
from liteyuki.log import logger
__plugin_meta__ = PluginMetadata(
name="NoneBot2启动器",
type=PluginType.APPLICATION,
)
def nb_run(*args, **kwargs):
"""
初始化NoneBot并运行在子进程
Args:
**kwargs:
Returns:
"""
# 给子进程传递通道对象
kwargs.update(kwargs.get("nonebot", {})) # nonebot配置优先
nonebot.init(**kwargs)
driver_manager.init(config=kwargs)
adapter_manager.init(kwargs)
adapter_manager.register()
try:
# nonebot.load_plugin("nonebot-plugin-lnpm") # 尝试加载轻雪NoneBot插件加载器Nonebot插件
nonebot.load_plugin("src.liteyuki_main") # 尝试加载轻雪主插件Nonebot插件
except Exception as e:
pass
nonebot.run()
if IS_MAIN_PROCESS:
from liteyuki import get_bot
from .dev_reloader import *
liteyuki = get_bot()
liteyuki.process_manager.add_target(name="nonebot", target=nb_run, args=(), kwargs=liteyuki.config)

View File

@ -1,24 +0,0 @@
# -*- coding: utf-8 -*-
"""
NoneBot 开发环境重载监视器
"""
import os.path
from liteyuki.dev import observer
from liteyuki import get_bot, logger
from liteyuki.utils import IS_MAIN_PROCESS
from watchdog.events import FileSystemEvent
liteyuki = get_bot()
exclude_extensions = (".pyc", ".pyo")
@observer.on_file_system_event(
directories=("src/nonebot_plugins",),
event_filter=lambda event: not event.src_path.endswith(exclude_extensions) and ("__pycache__" not in event.src_path ) and os.path.isfile(event.src_path)
)
def restart_nonebot_process(event: FileSystemEvent):
logger.debug(f"File {event.src_path} changed, reloading nonebot...")
liteyuki.restart_process("nonebot")

View File

@ -0,0 +1,33 @@
import os.path
from pathlib import Path
import nonebot
from croterline.utils import IsMainProcess
from liteyuki import get_bot
from liteyuki.core import sub_process_manager
from liteyuki.plugin import PluginMetadata, PluginType
__plugin_meta__ = PluginMetadata(
name="NoneBot2启动器",
type=PluginType.APPLICATION,
)
def nb_run(*args, **kwargs):
nonebot.init(**kwargs)
from .nb_utils import driver_manager, adapter_manager
driver_manager.init(config=kwargs)
adapter_manager.init(kwargs)
adapter_manager.register()
nonebot.load_plugin(Path(os.path.dirname(__file__)) / "np_main")
nonebot.run()
if IsMainProcess:
from .dev_reloader import *
bot = get_bot()
sub_process_manager.add(
name="nonebot", func=nb_run, **bot.config.get("nonebot", {})
)

View File

@ -10,7 +10,7 @@ from .common import MessageEventModel, msg_db
from src.utils.base.language import Language from src.utils.base.language import Language
from src.utils.base.resource import get_path from src.utils.base.resource import get_path
from src.utils.message.string_tool import convert_seconds_to_time from src.utils.message.string_tool import convert_seconds_to_time
from ...utils.external.logo import get_group_icon, get_user_icon from src.utils.external.logo import get_group_icon, get_user_icon
async def count_msg_by_bot_id(bot_id: str) -> int: async def count_msg_by_bot_id(bot_id: str) -> int:

View File

@ -15,7 +15,7 @@ __plugin_meta__ = PluginMetadata(
} }
) )
from ...utils.base.data_manager import set_memory_data from src.utils.base.data_manager import set_memory_data
driver = get_driver() driver = get_driver()

View File

@ -3,8 +3,8 @@ import aiohttp
from .qw_models import * from .qw_models import *
import httpx import httpx
from ...utils.base.data_manager import get_memory_data from src.utils.base.data_manager import get_memory_data
from ...utils.base.language import Language from src.utils.base.language import Language
dev_url = "https://devapi.qweather.com/" # 开发HBa dev_url = "https://devapi.qweather.com/" # 开发HBa
com_url = "https://api.qweather.com/" # 正式环境 com_url = "https://api.qweather.com/" # 正式环境

View File

@ -1,20 +1,20 @@
from nonebot.plugin import PluginMetadata from nonebot.plugin import PluginMetadata
from .core import * from .core import *
from .loader import * from .loader import *
__author__ = "snowykami" __author__ = "snowykami"
__plugin_meta__ = PluginMetadata( __plugin_meta__ = PluginMetadata(
name="轻雪核心插件", name="轻雪核心插件",
description="轻雪主程序插件,包含了许多初始化的功能", description="轻雪主程序插件,包含了许多初始化的功能",
usage="", usage="",
homepage="https://github.com/snowykami/LiteyukiBot", homepage="https://github.com/snowykami/LiteyukiBot",
extra={ extra={
"liteyuki" : True, "liteyuki" : True,
"toggleable": False, "toggleable": False,
} }
) )
from ..utils.base.language import Language, get_default_lang_code from src.utils.base.language import Language, get_default_lang_code
sys_lang = Language(get_default_lang_code()) sys_lang = Language(get_default_lang_code())
nonebot.logger.info(sys_lang.get("main.current_language", LANG=sys_lang.get("language.name"))) nonebot.logger.info(sys_lang.get("main.current_language", LANG=sys_lang.get("language.name")))

View File

@ -1,47 +1,47 @@
import nonebot import nonebot
from git import Repo from git import Repo
from src.utils.base.config import get_config from src.utils.base.config import get_config
remote_urls = [ remote_urls = [
"https://github.com/LiteyukiStudio/LiteyukiBot.git", "https://github.com/LiteyukiStudio/LiteyukiBot.git",
"https://gitee.com/snowykami/LiteyukiBot.git" "https://gitee.com/snowykami/LiteyukiBot.git"
] ]
def detect_update() -> bool: def detect_update() -> bool:
# 对每个远程仓库进行检查只要有一个仓库有更新就返回True # 对每个远程仓库进行检查只要有一个仓库有更新就返回True
for remote_url in remote_urls: for remote_url in remote_urls:
repo = Repo(".") repo = Repo(".")
repo.remotes.origin.set_url(remote_url) repo.remotes.origin.set_url(remote_url)
repo.remotes.origin.fetch() repo.remotes.origin.fetch()
if repo.head.commit != repo.commit('origin/main'): if repo.head.commit != repo.commit('origin/main'):
return True return True
def update_liteyuki() -> tuple[bool, str]: def update_liteyuki() -> tuple[bool, str]:
"""更新轻雪 """更新轻雪
:return: 是否更新成功更新变动""" :return: 是否更新成功更新变动"""
if get_config("allow_update", True): if get_config("allow_update", True):
new_commit_detected = detect_update() new_commit_detected = detect_update()
if new_commit_detected: if new_commit_detected:
repo = Repo(".") repo = Repo(".")
logs = "" logs = ""
# 对每个远程仓库进行更新 # 对每个远程仓库进行更新
for remote_url in remote_urls: for remote_url in remote_urls:
try: try:
logs += f"\nremote: {remote_url}" logs += f"\nremote: {remote_url}"
repo.remotes.origin.set_url(remote_url) repo.remotes.origin.set_url(remote_url)
repo.remotes.origin.pull() repo.remotes.origin.pull()
diffs = repo.head.commit.diff("origin/main") diffs = repo.head.commit.diff("origin/main")
for diff in diffs.iter_change_type('M'): for diff in diffs.iter_change_type('M'):
logs += f"\n{diff.a_path}" logs += f"\n{diff.a_path}"
return True, logs return True, logs
except: except:
continue continue
else: else:
return False, "Nothing Changed" return False, "Nothing Changed"
else: else:
raise PermissionError("Update is not allowed.") raise PermissionError("Update is not allowed.")

View File

@ -1,301 +1,301 @@
import time import time
from typing import AnyStr from typing import AnyStr
import time import time
from typing import AnyStr from typing import AnyStr
import nonebot import nonebot
import pip import pip
from nonebot import get_driver, require from nonebot import get_driver, require
from nonebot.adapters import onebot, satori from nonebot.adapters import onebot, satori
from nonebot.adapters.onebot.v11 import Message, unescape from nonebot.adapters.onebot.v11 import Message, unescape
from nonebot.internal.matcher import Matcher from nonebot.internal.matcher import Matcher
from nonebot.permission import SUPERUSER from nonebot.permission import SUPERUSER
# from src.liteyuki.core import Reloader # from src.liteyuki.core import Reloader
from src.utils import event as event_utils, satori_utils from src.utils import event as event_utils, satori_utils
from src.utils.base.config import get_config from src.utils.base.config import get_config
from src.utils.base.data_manager import TempConfig, common_db from src.utils.base.data_manager import TempConfig, common_db
from src.utils.base.language import get_user_lang from src.utils.base.language import get_user_lang
from src.utils.base.ly_typing import T_Bot, T_MessageEvent from src.utils.base.ly_typing import T_Bot, T_MessageEvent
from src.utils.message.message import MarkdownMessage as md, broadcast_to_superusers from src.utils.message.message import MarkdownMessage as md, broadcast_to_superusers
from .api import update_liteyuki # type: ignore from .api import update_liteyuki # type: ignore
from ..utils.base import reload # type: ignore from src.utils.base import reload # type: ignore
from ..utils.base.ly_function import get_function # type: ignore from src.utils.base.ly_function import get_function # type: ignore
from ..utils.message.html_tool import md_to_pic from src.utils.message.html_tool import md_to_pic
require("nonebot_plugin_alconna") require("nonebot_plugin_alconna")
require("nonebot_plugin_apscheduler") require("nonebot_plugin_apscheduler")
from nonebot_plugin_alconna import UniMessage, on_alconna, Alconna, Args, Arparma, MultiVar from nonebot_plugin_alconna import UniMessage, on_alconna, Alconna, Args, Arparma, MultiVar
from nonebot_plugin_apscheduler import scheduler from nonebot_plugin_apscheduler import scheduler
driver = get_driver() driver = get_driver()
@on_alconna( @on_alconna(
command=Alconna( command=Alconna(
"liteecho", "liteecho",
Args["text", str, ""], Args["text", str, ""],
), ),
permission=SUPERUSER permission=SUPERUSER
).handle() ).handle()
# Satori OK # Satori OK
async def _(bot: T_Bot, matcher: Matcher, result: Arparma): async def _(bot: T_Bot, matcher: Matcher, result: Arparma):
if text := result.main_args.get("text"): if text := result.main_args.get("text"):
await matcher.finish(Message(unescape(text))) await matcher.finish(Message(unescape(text)))
else: else:
await matcher.finish(f"Hello, Liteyuki!\nBot {bot.self_id}") await matcher.finish(f"Hello, Liteyuki!\nBot {bot.self_id}")
@on_alconna( @on_alconna(
aliases={"更新轻雪"}, aliases={"更新轻雪"},
command=Alconna( command=Alconna(
"update-liteyuki" "update-liteyuki"
), ),
permission=SUPERUSER permission=SUPERUSER
).handle() ).handle()
# Satori OK # Satori OK
async def _(bot: T_Bot, event: T_MessageEvent, matcher: Matcher): async def _(bot: T_Bot, event: T_MessageEvent, matcher: Matcher):
# 使用git pull更新 # 使用git pull更新
ulang = get_user_lang(str(event.user.id if isinstance(event, satori.event.Event) else event.user_id)) ulang = get_user_lang(str(event.user.id if isinstance(event, satori.event.Event) else event.user_id))
success, logs = update_liteyuki() success, logs = update_liteyuki()
reply = "Liteyuki updated!\n" reply = "Liteyuki updated!\n"
reply += f"```\n{logs}\n```\n" reply += f"```\n{logs}\n```\n"
btn_restart = md.btn_cmd(ulang.get("liteyuki.restart_now"), "reload-liteyuki") btn_restart = md.btn_cmd(ulang.get("liteyuki.restart_now"), "reload-liteyuki")
pip.main(["install", "-r", "requirements.txt"]) pip.main(["install", "-r", "requirements.txt"])
reply += f"{ulang.get('liteyuki.update_restart', RESTART=btn_restart)}" reply += f"{ulang.get('liteyuki.update_restart', RESTART=btn_restart)}"
# await md.send_md(reply, bot) # await md.send_md(reply, bot)
img_bytes = await md_to_pic(reply) img_bytes = await md_to_pic(reply)
await UniMessage.send(UniMessage.image(raw=img_bytes)) await UniMessage.send(UniMessage.image(raw=img_bytes))
@on_alconna( @on_alconna(
aliases={"重启轻雪"}, aliases={"重启轻雪"},
command=Alconna( command=Alconna(
"reload-liteyuki" "reload-liteyuki"
), ),
permission=SUPERUSER permission=SUPERUSER
).handle() ).handle()
# Satori OK # Satori OK
async def _(matcher: Matcher, bot: T_Bot, event: T_MessageEvent): async def _(matcher: Matcher, bot: T_Bot, event: T_MessageEvent):
await matcher.send("Liteyuki reloading") await matcher.send("Liteyuki reloading")
temp_data = common_db.where_one(TempConfig(), default=TempConfig()) temp_data = common_db.where_one(TempConfig(), default=TempConfig())
temp_data.data.update( temp_data.data.update(
{ {
"reload" : True, "reload" : True,
"reload_time" : time.time(), "reload_time" : time.time(),
"reload_bot_id" : bot.self_id, "reload_bot_id" : bot.self_id,
"reload_session_type": event_utils.get_message_type(event), "reload_session_type": event_utils.get_message_type(event),
"reload_session_id" : (event.group_id if event.message_type == "group" else event.user_id) "reload_session_id" : (event.group_id if event.message_type == "group" else event.user_id)
if not isinstance(event, satori.event.Event) else event.chan_active.id, if not isinstance(event, satori.event.Event) else event.chan_active.id,
"delta_time" : 0 "delta_time" : 0
} }
) )
common_db.save(temp_data) common_db.save(temp_data)
reload() reload()
@on_alconna( @on_alconna(
command=Alconna( command=Alconna(
"liteyuki-docs", "liteyuki-docs",
), ),
aliases={"轻雪文档"}, aliases={"轻雪文档"},
).handle() ).handle()
# Satori OK # Satori OK
async def _(matcher: Matcher): async def _(matcher: Matcher):
await matcher.finish("https://bot.liteyuki.icu/") await matcher.finish("https://bot.liteyuki.icu/")
@on_alconna( @on_alconna(
command=Alconna( command=Alconna(
"/function", "/function",
Args["function", str]["args", MultiVar(str), ()], Args["function", str]["args", MultiVar(str), ()],
), ),
permission=SUPERUSER permission=SUPERUSER
).handle() ).handle()
async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher): async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher):
""" """
调用轻雪函数 调用轻雪函数
Args: Args:
result: result:
bot: bot:
event: event:
Returns: Returns:
""" """
function_name = result.main_args.get("function") function_name = result.main_args.get("function")
args: tuple[str] = result.main_args.get("args", ()) args: tuple[str] = result.main_args.get("args", ())
_args = [] _args = []
_kwargs = { _kwargs = {
"USER_ID" : str(event.user_id), "USER_ID" : str(event.user_id),
"GROUP_ID": str(event.group_id) if event.message_type == "group" else "0", "GROUP_ID": str(event.group_id) if event.message_type == "group" else "0",
"BOT_ID" : str(bot.self_id) "BOT_ID" : str(bot.self_id)
} }
for arg in args: for arg in args:
arg = arg.replace("\\=", "EQUAL_SIGN") arg = arg.replace("\\=", "EQUAL_SIGN")
if "=" in arg: if "=" in arg:
key, value = arg.split("=", 1) key, value = arg.split("=", 1)
value = unescape(value.replace("EQUAL_SIGN", "=")) value = unescape(value.replace("EQUAL_SIGN", "="))
try: try:
value = eval(value) value = eval(value)
except: except:
value = value value = value
_kwargs[key] = value _kwargs[key] = value
else: else:
_args.append(arg.replace("EQUAL_SIGN", "=")) _args.append(arg.replace("EQUAL_SIGN", "="))
ly_func = get_function(function_name) ly_func = get_function(function_name)
ly_func.bot = bot if "BOT_ID" not in _kwargs else nonebot.get_bot(_kwargs["BOT_ID"]) ly_func.bot = bot if "BOT_ID" not in _kwargs else nonebot.get_bot(_kwargs["BOT_ID"])
ly_func.matcher = matcher ly_func.matcher = matcher
await ly_func(*tuple(_args), **_kwargs) await ly_func(*tuple(_args), **_kwargs)
@on_alconna( @on_alconna(
command=Alconna( command=Alconna(
"/api", "/api",
Args["api", str]["args", MultiVar(AnyStr), ()], Args["api", str]["args", MultiVar(AnyStr), ()],
), ),
permission=SUPERUSER permission=SUPERUSER
).handle() ).handle()
async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher): async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher):
""" """
调用API 调用API
Args: Args:
result: result:
bot: bot:
event: event:
Returns: Returns:
""" """
api_name = result.main_args.get("api") api_name = result.main_args.get("api")
args: tuple[str] = result.main_args.get("args", ()) # 类似于url参数但每个参数间用空格分隔空格是%20 args: tuple[str] = result.main_args.get("args", ()) # 类似于url参数但每个参数间用空格分隔空格是%20
args_dict = {} args_dict = {}
for arg in args: for arg in args:
key, value = arg.split("=", 1) key, value = arg.split("=", 1)
args_dict[key] = unescape(value.replace("%20", " ")) args_dict[key] = unescape(value.replace("%20", " "))
if api_name in need_user_id and "user_id" not in args_dict: if api_name in need_user_id and "user_id" not in args_dict:
args_dict["user_id"] = str(event.user_id) args_dict["user_id"] = str(event.user_id)
if api_name in need_group_id and "group_id" not in args_dict and event.message_type == "group": if api_name in need_group_id and "group_id" not in args_dict and event.message_type == "group":
args_dict["group_id"] = str(event.group_id) args_dict["group_id"] = str(event.group_id)
if "message" in args_dict: if "message" in args_dict:
args_dict["message"] = Message(eval(args_dict["message"])) args_dict["message"] = Message(eval(args_dict["message"]))
if "messages" in args_dict: if "messages" in args_dict:
args_dict["messages"] = Message(eval(args_dict["messages"])) args_dict["messages"] = Message(eval(args_dict["messages"]))
try: try:
result = await bot.call_api(api_name, **args_dict) result = await bot.call_api(api_name, **args_dict)
except Exception as e: except Exception as e:
result = str(e) result = str(e)
args_show = "\n".join("- %s: %s" % (k, v) for k, v in args_dict.items()) args_show = "\n".join("- %s: %s" % (k, v) for k, v in args_dict.items())
await matcher.finish(f"API: {api_name}\n\nArgs: \n{args_show}\n\nResult: {result}") await matcher.finish(f"API: {api_name}\n\nArgs: \n{args_show}\n\nResult: {result}")
@driver.on_startup @driver.on_startup
async def on_startup(): async def on_startup():
temp_data = common_db.where_one(TempConfig(), default=TempConfig()) temp_data = common_db.where_one(TempConfig(), default=TempConfig())
# 储存重启信息 # 储存重启信息
if temp_data.data.get("reload", False): if temp_data.data.get("reload", False):
delta_time = time.time() - temp_data.data.get("reload_time", 0) delta_time = time.time() - temp_data.data.get("reload_time", 0)
temp_data.data["delta_time"] = delta_time temp_data.data["delta_time"] = delta_time
common_db.save(temp_data) # 更新数据 common_db.save(temp_data) # 更新数据
""" """
该部分将迁移至轻雪生命周期 该部分将迁移至轻雪生命周期
Returns: Returns:
""" """
@driver.on_shutdown @driver.on_shutdown
async def on_shutdown(): async def on_shutdown():
pass pass
@driver.on_bot_connect @driver.on_bot_connect
async def _(bot: T_Bot): async def _(bot: T_Bot):
temp_data = common_db.where_one(TempConfig(), default=TempConfig()) temp_data = common_db.where_one(TempConfig(), default=TempConfig())
if isinstance(bot, satori.Bot): if isinstance(bot, satori.Bot):
await satori_utils.user_infos.load_friends(bot) await satori_utils.user_infos.load_friends(bot)
# 用于重启计时 # 用于重启计时
if temp_data.data.get("reload", False): if temp_data.data.get("reload", False):
temp_data.data["reload"] = False temp_data.data["reload"] = False
reload_bot_id = temp_data.data.get("reload_bot_id", 0) reload_bot_id = temp_data.data.get("reload_bot_id", 0)
if reload_bot_id != bot.self_id: if reload_bot_id != bot.self_id:
return return
reload_session_type = temp_data.data.get("reload_session_type", "private") reload_session_type = temp_data.data.get("reload_session_type", "private")
reload_session_id = temp_data.data.get("reload_session_id", 0) reload_session_id = temp_data.data.get("reload_session_id", 0)
delta_time = temp_data.data.get("delta_time", 0) delta_time = temp_data.data.get("delta_time", 0)
common_db.save(temp_data) # 更新数据 common_db.save(temp_data) # 更新数据
if delta_time <= 20.0: # 启动时间太长就别发了,丢人 if delta_time <= 20.0: # 启动时间太长就别发了,丢人
if isinstance(bot, satori.Bot): if isinstance(bot, satori.Bot):
await bot.send_message( await bot.send_message(
channel_id=reload_session_id, channel_id=reload_session_id,
message="Liteyuki reloaded in %.2f s" % delta_time message="Liteyuki reloaded in %.2f s" % delta_time
) )
elif isinstance(bot, onebot.v11.Bot): elif isinstance(bot, onebot.v11.Bot):
await bot.send_msg( await bot.send_msg(
message_type=reload_session_type, message_type=reload_session_type,
user_id=reload_session_id, user_id=reload_session_id,
group_id=reload_session_id, group_id=reload_session_id,
message="Liteyuki reloaded in %.2f s" % delta_time message="Liteyuki reloaded in %.2f s" % delta_time
) )
elif isinstance(bot, onebot.v12.Bot): elif isinstance(bot, onebot.v12.Bot):
await bot.send_message( await bot.send_message(
message_type=reload_session_type, message_type=reload_session_type,
user_id=reload_session_id, user_id=reload_session_id,
group_id=reload_session_id, group_id=reload_session_id,
message="Liteyuki reloaded in %.2f s" % delta_time, message="Liteyuki reloaded in %.2f s" % delta_time,
detail_type="group" detail_type="group"
) )
# 每天4点更新 # 每天4点更新
@scheduler.scheduled_job("cron", hour=4) @scheduler.scheduled_job("cron", hour=4)
async def every_day_update(): async def every_day_update():
if get_config("auto_update", default=True): if get_config("auto_update", default=True):
result, logs = update_liteyuki() result, logs = update_liteyuki()
pip.main(["install", "-r", "requirements.txt"]) pip.main(["install", "-r", "requirements.txt"])
if result: if result:
await broadcast_to_superusers(f"Liteyuki updated: ```\n{logs}\n```") await broadcast_to_superusers(f"Liteyuki updated: ```\n{logs}\n```")
nonebot.logger.info(f"Liteyuki updated: {logs}") nonebot.logger.info(f"Liteyuki updated: {logs}")
reload() reload()
else: else:
nonebot.logger.info(logs) nonebot.logger.info(logs)
# 需要用户id的api # 需要用户id的api
need_user_id = ( need_user_id = (
"send_private_msg", "send_private_msg",
"send_msg", "send_msg",
"set_group_card", "set_group_card",
"set_group_special_title", "set_group_special_title",
"get_stranger_info", "get_stranger_info",
"get_group_member_info" "get_group_member_info"
) )
need_group_id = ( need_group_id = (
"send_group_msg", "send_group_msg",
"send_msg", "send_msg",
"set_group_card", "set_group_card",
"set_group_name", "set_group_name",
"set_group_special_title", "set_group_special_title",
"get_group_member_info", "get_group_member_info",
"get_group_member_list", "get_group_member_list",
"get_group_honor_info" "get_group_honor_info"
) )

View File

@ -1,33 +1,39 @@
import asyncio import asyncio
import os.path
import nonebot.plugin from pathlib import Path
from nonebot import get_driver
from src.utils import init_log import nonebot.plugin
from src.utils.base.config import get_config from nonebot import get_driver
from src.utils.base.data_manager import InstalledPlugin, plugin_db from src.utils import init_log
from src.utils.base.resource import load_resources from src.utils.base.config import get_config
from src.utils.message.tools import check_for_package from src.utils.base.data_manager import InstalledPlugin, plugin_db
from src.utils.base.resource import load_resources
load_resources() from src.utils.message.tools import check_for_package
init_log()
load_resources()
driver = get_driver() init_log()
driver = get_driver()
@driver.on_startup
async def load_plugins():
nonebot.plugin.load_plugins("src/nonebot_plugins") @driver.on_startup
# 从数据库读取已安装的插件 async def load_plugins():
if not get_config("safe_mode", False): print("load from", os.path.join(os.path.dirname(__file__), "../nonebot_plugins"))
# 安全模式下,不加载插件 nonebot.plugin.load_plugins(os.path.abspath(os.path.join(os.path.dirname(__file__), "../nonebot_plugins")))
installed_plugins: list[InstalledPlugin] = plugin_db.where_all(InstalledPlugin()) # 从数据库读取已安装的插件
if installed_plugins: if not get_config("safe_mode", False):
for installed_plugin in installed_plugins: # 安全模式下,不加载插件
if not check_for_package(installed_plugin.module_name): installed_plugins: list[InstalledPlugin] = plugin_db.where_all(
nonebot.logger.error( InstalledPlugin()
f"{installed_plugin.module_name} not installed, but still in loader index.") )
else: if installed_plugins:
nonebot.load_plugin(installed_plugin.module_name) for installed_plugin in installed_plugins:
nonebot.plugin.load_plugins("plugins") if not check_for_package(installed_plugin.module_name):
else: nonebot.logger.error(
nonebot.logger.info("Safe mode is on, no plugin loaded.") f"{installed_plugin.module_name} not installed, but still in loader index."
)
else:
nonebot.load_plugin(installed_plugin.module_name)
nonebot.plugin.load_plugins("plugins")
else:
nonebot.logger.info("Safe mode is on, no plugin loaded.")

View File

@ -1,16 +0,0 @@
from nonebot.plugin import PluginMetadata
from .auto_update import *
__author__ = "expliyh"
__plugin_meta__ = PluginMetadata(
name="Satori 用户数据自动更新(临时措施)",
description="",
usage="",
type="application",
homepage="https://github.com/snowykami/LiteyukiBot",
extra={
"liteyuki": True,
"toggleable" : True,
"default_enable" : True,
}
)

View File

@ -1,20 +0,0 @@
import nonebot
from nonebot.message import event_preprocessor
from src.utils.base.ly_typing import T_MessageEvent
from src.utils import satori_utils
from nonebot.adapters import satori
from nonebot_plugin_alconna.typings import Event
from src.nonebot_plugins.liteyuki_status.counter_for_satori import satori_counter
@event_preprocessor
async def pre_handle(event: Event):
if isinstance(event, satori.MessageEvent):
if event.user.id == event.self_id:
satori_counter.msg_sent += 1
else:
satori_counter.msg_received += 1
if event.user.name is not None:
if await satori_utils.user_infos.put(event.user):
nonebot.logger.info(f"Satori user {event.user.name}<{event.user.id}> updated")

View File

@ -1,42 +1,42 @@
import sys import sys
import nonebot import nonebot
__NAME__ = "LiteyukiBot" __NAME__ = "LiteyukiBot"
__VERSION__ = "6.3.2" # 60201 __VERSION__ = "6.3.2" # 60201
from src.utils.base.config import load_from_yaml, config from src.utils.base.config import load_from_yaml, config
from src.utils.base.log import init_log from src.utils.base.log import init_log
from git import Repo from git import Repo
major, minor, patch = map(int, __VERSION__.split(".")) major, minor, patch = map(int, __VERSION__.split("."))
__VERSION_I__ = major * 10000 + minor * 100 + patch __VERSION_I__ = major * 10000 + minor * 100 + patch
def init(): def init():
""" """
初始化 初始化
Returns: Returns:
""" """
# 检测python版本是否高于3.10 # 检测python版本是否高于3.10
init_log() init_log()
if sys.version_info < (3, 10): if sys.version_info < (3, 10):
nonebot.logger.error("Requires Python3.10+ to run, please upgrade your Python Environment.") nonebot.logger.error("Requires Python3.10+ to run, please upgrade your Python Environment.")
exit(1) exit(1)
try: try:
# 检测git仓库 # 检测git仓库
repo = Repo(".") repo = Repo(".")
except Exception as e: except Exception as e:
nonebot.logger.error(f"Failed to load git repository: {e}, please clone this project from GitHub instead of downloading the zip file.") nonebot.logger.error(f"Failed to load git repository: {e}, please clone this project from GitHub instead of downloading the zip file.")
# temp_data: TempConfig = common_db.where_one(TempConfig(), default=TempConfig()) # temp_data: TempConfig = common_db.where_one(TempConfig(), default=TempConfig())
# temp_data.data["start_time"] = time.time() # temp_data.data["start_time"] = time.time()
# common_db.save(temp_data) # common_db.save(temp_data)
nonebot.logger.info( nonebot.logger.info(
f"Run Liteyuki-NoneBot with Python{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro} " f"Run Liteyuki-NoneBot with Python{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro} "
f"at {sys.executable}" f"at {sys.executable}"
) )
nonebot.logger.info(f"{__NAME__} {__VERSION__}({__VERSION_I__}) is running") nonebot.logger.info(f"{__NAME__} {__VERSION__}({__VERSION_I__}) is running")

View File

@ -1,109 +1,109 @@
import os import os
import platform import platform
from typing import List from typing import List
import nonebot import nonebot
import yaml import yaml
from pydantic import BaseModel from pydantic import BaseModel
from ..message.tools import random_hex_string from ..message.tools import random_hex_string
config = {} # 全局配置,确保加载后读取 config = {} # 全局配置,确保加载后读取
class SatoriNodeConfig(BaseModel): class SatoriNodeConfig(BaseModel):
host: str = "" host: str = ""
port: str = "5500" port: str = "5500"
path: str = "" path: str = ""
token: str = "" token: str = ""
class SatoriConfig(BaseModel): class SatoriConfig(BaseModel):
comment: str = ( comment: str = (
"These features are still in development. Do not enable in production environment." "These features are still in development. Do not enable in production environment."
) )
enable: bool = False enable: bool = False
hosts: List[SatoriNodeConfig] = [SatoriNodeConfig()] hosts: List[SatoriNodeConfig] = [SatoriNodeConfig()]
class BasicConfig(BaseModel): class BasicConfig(BaseModel):
host: str = "127.0.0.1" host: str = "127.0.0.1"
port: int = 20216 port: int = 20216
superusers: list[str] = [] superusers: list[str] = []
command_start: list[str] = ["/", ""] command_start: list[str] = ["/", ""]
nickname: list[str] = [f"LiteyukiBot-{random_hex_string(6)}"] nickname: list[str] = [f"LiteyukiBot-{random_hex_string(6)}"]
satori: SatoriConfig = SatoriConfig() satori: SatoriConfig = SatoriConfig()
data_path: str = "data/liteyuki" data_path: str = "data/liteyuki"
chromium_path: str = ( chromium_path: str = (
"/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome" # type: ignore "/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome" # type: ignore
if platform.system() == "Darwin" if platform.system() == "Darwin"
else ( else (
"C:/Program Files (x86)/Microsoft/Edge/Application/msedge.exe" "C:/Program Files (x86)/Microsoft/Edge/Application/msedge.exe"
if platform.system() == "Windows" if platform.system() == "Windows"
else "/usr/bin/chromium-browser" else "/usr/bin/chromium-browser"
) )
) )
def load_from_yaml(file_: str) -> dict: def load_from_yaml(file_: str) -> dict:
global config global config
nonebot.logger.debug("Loading config from %s" % file_) nonebot.logger.debug("Loading config from %s" % file_)
if not os.path.exists(file_): if not os.path.exists(file_):
nonebot.logger.warning( nonebot.logger.warning(
f"Config file {file_} not found, created default config, please modify it and restart" f"Config file {file_} not found, created default config, please modify it and restart"
) )
with open(file_, "w", encoding="utf-8") as f: with open(file_, "w", encoding="utf-8") as f:
yaml.dump(BasicConfig().dict(), f, default_flow_style=False) yaml.dump(BasicConfig().dict(), f, default_flow_style=False)
with open(file_, "r", encoding="utf-8") as f: with open(file_, "r", encoding="utf-8") as f:
conf = init_conf(yaml.load(f, Loader=yaml.FullLoader)) conf = init_conf(yaml.load(f, Loader=yaml.FullLoader))
config = conf config = conf
if conf is None: if conf is None:
nonebot.logger.warning( nonebot.logger.warning(
f"Config file {file_} is empty, use default config. please modify it and restart" f"Config file {file_} is empty, use default config. please modify it and restart"
) )
conf = BasicConfig().dict() conf = BasicConfig().dict()
return conf return conf
def get_config(key: str, default=None): def get_config(key: str, default=None):
"""获取配置项优先级bot > config > db > yaml""" """获取配置项优先级bot > config > db > yaml"""
try: try:
bot = nonebot.get_bot() bot = nonebot.get_bot()
except: except:
bot = None bot = None
if bot is None: if bot is None:
bot_config = {} bot_config = {}
else: else:
bot_config = bot.config.dict() bot_config = bot.config.dict()
if key in bot_config: if key in bot_config:
return bot_config[key] return bot_config[key]
elif key in config: elif key in config:
return config[key] return config[key]
elif key in load_from_yaml("config.yml"): elif key in load_from_yaml("config.yml"):
return load_from_yaml("config.yml")[key] return load_from_yaml("config.yml")[key]
else: else:
return default return default
def init_conf(conf: dict) -> dict: def init_conf(conf: dict) -> dict:
""" """
初始化配置文件确保配置文件中的必要字段存在且不会冲突 初始化配置文件确保配置文件中的必要字段存在且不会冲突
Args: Args:
conf: conf:
Returns: Returns:
""" """
# 若command_start中无""则添加必要命令头开启alconna_use_command_start防止冲突 # 若command_start中无""则添加必要命令头开启alconna_use_command_start防止冲突
# 以下内容由于issue #53 被注释 # 以下内容由于issue #53 被注释
# if "" not in conf.get("command_start", []): # if "" not in conf.get("command_start", []):
# conf["alconna_use_command_start"] = True # conf["alconna_use_command_start"] = True
return conf return conf
pass pass

View File

@ -1,436 +1,436 @@
import inspect import inspect
import os import os
import pickle import pickle
import sqlite3 import sqlite3
from types import NoneType from types import NoneType
from typing import Any, Callable from typing import Any, Callable
from nonebot import logger from nonebot import logger
from nonebot.compat import PYDANTIC_V2 from nonebot.compat import PYDANTIC_V2
from pydantic import BaseModel from pydantic import BaseModel
class LiteModel(BaseModel): class LiteModel(BaseModel):
TABLE_NAME: str = None TABLE_NAME: str = None
id: int = None id: int = None
def dump(self, *args, **kwargs): def dump(self, *args, **kwargs):
if PYDANTIC_V2: if PYDANTIC_V2:
return self.model_dump(*args, **kwargs) return self.model_dump(*args, **kwargs)
else: else:
return self.dict(*args, **kwargs) return self.dict(*args, **kwargs)
class Database: class Database:
def __init__(self, db_name: str): def __init__(self, db_name: str):
if os.path.dirname(db_name) != "" and not os.path.exists(os.path.dirname(db_name)): if os.path.dirname(db_name) != "" and not os.path.exists(os.path.dirname(db_name)):
os.makedirs(os.path.dirname(db_name)) os.makedirs(os.path.dirname(db_name))
self.db_name = db_name self.db_name = db_name
self.conn = sqlite3.connect(db_name, check_same_thread=False) self.conn = sqlite3.connect(db_name, check_same_thread=False)
self.cursor = self.conn.cursor() self.cursor = self.conn.cursor()
self._on_save_callbacks = [] self._on_save_callbacks = []
self._is_locked = False self._is_locked = False
def lock(self): def lock(self):
self.cursor.execute("BEGIN TRANSACTION") self.cursor.execute("BEGIN TRANSACTION")
self._is_locked = True self._is_locked = True
def lock_query(self, query: str, *args): def lock_query(self, query: str, *args):
"""锁定查询""" """锁定查询"""
self.cursor.execute(query, args).fetchall() self.cursor.execute(query, args).fetchall()
def lock_model(self, model: LiteModel) -> LiteModel | Any | None: def lock_model(self, model: LiteModel) -> LiteModel | Any | None:
"""锁定行 """锁定行
Args: Args:
model: 数据模型实例 model: 数据模型实例
Returns: Returns:
""" """
pass pass
def unlock(self): def unlock(self):
self.cursor.execute("COMMIT") self.cursor.execute("COMMIT")
self._is_locked = False self._is_locked = False
def where_one(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> LiteModel | Any | None: def where_one(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> LiteModel | Any | None:
"""查询第一个 """查询第一个
Args: Args:
model: 数据模型实例 model: 数据模型实例
condition: 查询条件不给定则查询所有 condition: 查询条件不给定则查询所有
*args: 参数化查询参数 *args: 参数化查询参数
default: 默认值 default: 默认值
Returns: Returns:
""" """
all_results = self.where_all(model, condition, *args) all_results = self.where_all(model, condition, *args)
return all_results[0] if all_results else default return all_results[0] if all_results else default
def where_all(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> list[LiteModel | Any] | None: def where_all(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> list[LiteModel | Any] | None:
"""查询所有 """查询所有
Args: Args:
model: 数据模型实例 model: 数据模型实例
condition: 查询条件不给定则查询所有 condition: 查询条件不给定则查询所有
*args: 参数化查询参数 *args: 参数化查询参数
default: 默认值 default: 默认值
Returns: Returns:
""" """
table_name = model.TABLE_NAME table_name = model.TABLE_NAME
model_type = type(model) model_type = type(model)
logger.debug(f"Selecting {model.TABLE_NAME} WHERE {condition.replace('?', '%s') % args}") logger.debug(f"Selecting {model.TABLE_NAME} WHERE {condition.replace('?', '%s') % args}")
if not table_name: if not table_name:
raise ValueError(f"数据模型{model_type.__name__}未提供表名") raise ValueError(f"数据模型{model_type.__name__}未提供表名")
# condition = f"WHERE {condition}" # condition = f"WHERE {condition}"
# print(f"SELECT * FROM {table_name} {condition}", args) # print(f"SELECT * FROM {table_name} {condition}", args)
# if len(args) == 0: # if len(args) == 0:
# results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}").fetchall() # results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}").fetchall()
# else: # else:
# results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}", args).fetchall() # results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}", args).fetchall()
if condition: if condition:
results = self.cursor.execute(f"SELECT * FROM {table_name} WHERE {condition}", args).fetchall() results = self.cursor.execute(f"SELECT * FROM {table_name} WHERE {condition}", args).fetchall()
else: else:
results = self.cursor.execute(f"SELECT * FROM {table_name}").fetchall() results = self.cursor.execute(f"SELECT * FROM {table_name}").fetchall()
fields = [description[0] for description in self.cursor.description] fields = [description[0] for description in self.cursor.description]
if not results: if not results:
return default return default
else: else:
return [model_type(**self._load(dict(zip(fields, result)))) for result in results] return [model_type(**self._load(dict(zip(fields, result)))) for result in results]
def save(self, *args: LiteModel):
    """Insert or update (upsert) the given model instances.

    Fix: the original accidentally assigned this text to ``self.returns_``
    instead of making it the method docstring, leaving a stray attribute
    on every call.

    Args:
        *args: model instances to persist.

    Raises:
        ValueError: if a model lacks ``TABLE_NAME`` or its table has not
            been migrated yet.
    """
    table_list = [item[0] for item in self.cursor.execute("SELECT name FROM sqlite_master WHERE type ='table'").fetchall()]
    for model in args:
        logger.debug(f"Upserting {model}")
        if not model.TABLE_NAME:
            raise ValueError(f"数据模型 {model.__class__.__name__} 未提供表名")
        elif model.TABLE_NAME not in table_list:
            raise ValueError(f"数据模型 {model.__class__.__name__}{model.TABLE_NAME} 不存在,请先迁移")
        else:
            self._save(model.dump(by_alias=True))
        # Notify registered on_save callbacks for each saved model.
        for callback in self._on_save_callbacks:
            callback(model)
def _save(self, obj: Any) -> Any:
    """Recursively serialize and persist *obj*.

    Storage scheme (established by the branches below):
      * dict carrying "TABLE_NAME": upserted as a row; returns the reference
        string ``"FOREIGN_KEY_<rowid>@<table>"``.
      * plain dict: pickled after saving its values; returns bytes.
      * list/set/tuple: items saved recursively, container pickled; returns bytes.
    """
    # obj = copy.deepcopy(obj)
    if isinstance(obj, dict):
        table_name = obj.get("TABLE_NAME")
        row_id = obj.get("id")
        new_obj = {}
        for field, value in obj.items():
            if isinstance(value, self.ITERABLE_TYPE):
                # Containers/nested models get a type prefix on the column name.
                new_obj[self._get_stored_field_prefix(value) + field] = self._save(value)  # self._save(value) # -> bytes
            elif isinstance(value, self.BASIC_TYPE):
                new_obj[field] = value
            else:
                raise ValueError(f"数据模型{table_name}包含不支持的数据类型,字段:{field} 值:{value} 值类型:{type(value)}")
        if table_name:
            fields, values = [], []
            for n_field, n_value in new_obj.items():
                if n_field not in ["TABLE_NAME", "id"]:
                    fields.append(n_field)
                    values.append(n_value)
            # TABLE_NAME and id were filtered out above.
            fields = list(fields)
            values = list(values)
            if row_id is not None:
                # Existing row: prepend 'id' so INSERT OR REPLACE updates it in place.
                fields.insert(0, 'id')
                values.insert(0, row_id)
            fields = ', '.join([f'"{field}"' for field in fields])
            placeholders = ', '.join('?' for _ in values)
            self.cursor.execute(f"INSERT OR REPLACE INTO {table_name}({fields}) VALUES ({placeholders})", tuple(values))
            # self.conn.commit()
            if self._is_locked:
                pass  # inside a transaction: commit deferred to the owner
            else:
                self.conn.commit()
            foreign_id = self.cursor.execute("SELECT last_insert_rowid()").fetchone()[0]
            return f"{self.FOREIGN_KEY_PREFIX}{foreign_id}@{table_name}"  # -> FOREIGN_KEY_123456@{table_name} id@{table_name}
        else:
            return pickle.dumps(new_obj)  # -> bytes
    elif isinstance(obj, (list, set, tuple)):
        obj_type = type(obj)  # preserved so unpickling restores the container type
        new_obj = []
        for item in obj:
            if isinstance(item, self.ITERABLE_TYPE):
                new_obj.append(self._save(item))
            elif isinstance(item, self.BASIC_TYPE):
                new_obj.append(item)
            else:
                raise ValueError(f"数据模型包含不支持的数据类型,值:{item} 值类型:{type(item)}")
        return pickle.dumps(obj_type(new_obj))  # -> bytes
    else:
        raise ValueError(f"数据模型包含不支持的数据类型,值:{obj} 值类型:{type(obj)}")
def _load(self, obj: Any) -> Any:
    """Inverse of ``_save``: resolve storage prefixes back into objects.

    ``PICKLE_BYTES_``-prefixed fields are unpickled; ``FOREIGN_KEY_``-prefixed
    fields are fetched from their table and loaded recursively. Everything
    else is returned unchanged.
    """
    if isinstance(obj, dict):
        new_obj = {}
        for field, value in obj.items():
            field: str
            if field.startswith(self.BYTES_PREFIX):
                if isinstance(value, bytes):
                    new_obj[field.replace(self.BYTES_PREFIX, "")] = self._load(pickle.loads(value))
                else:
                    # value may be None (fix dated 2024/6/13); left untouched for now
                    pass
            elif field.startswith(self.FOREIGN_KEY_PREFIX):
                new_obj[field.replace(self.FOREIGN_KEY_PREFIX, "")] = self._load(self._get_foreign_data(value))
            else:
                new_obj[field] = value
        return new_obj
    elif isinstance(obj, (list, set, tuple)):
        # NOTE(review): containers always come back as a list here even when
        # the pickled value was a set/tuple — confirm this is intended.
        new_obj = []
        for item in obj:
            if isinstance(item, bytes):
                # Try to unpickle; fall back to the raw bytes on failure.
                # (Fix: dropped the unused `as e` binding.)
                try:
                    new_obj.append(self._load(pickle.loads(item)))
                except Exception:
                    new_obj.append(self._load(item))
            elif isinstance(item, str) and item.startswith(self.FOREIGN_KEY_PREFIX):
                new_obj.append(self._load(self._get_foreign_data(item)))
            else:
                new_obj.append(self._load(item))
        return new_obj
    else:
        return obj
def delete(self, model: LiteModel, condition: str, *args: Any, allow_empty: bool = False):
    """Delete rows matching *condition* from the model's table.

    If the model instance carries an ``id``, that single row is targeted
    and any caller-supplied condition/args are replaced.

    Args:
        allow_empty: permit an empty condition (wipes the whole table).
        model: model whose table is targeted.
        condition: SQL WHERE clause with ``?`` placeholders.
        *args: parameters bound to the placeholders.

    Raises:
        ValueError: if no table name, or the condition is empty without
            ``allow_empty``.
    """
    table_name = model.TABLE_NAME
    logger.debug(f"Deleting {model} WHERE {condition} {args}")
    if not table_name:
        raise ValueError(f"数据模型{model.__class__.__name__}未提供表名")
    if model.id is not None:
        # Fix: use a bound parameter instead of f-string interpolation, and
        # replace stale *args that would otherwise mismatch the placeholder
        # count and make sqlite3 raise.
        condition = "id = ?"
        args = (model.id,)
    if not condition and not allow_empty:
        raise ValueError("删除操作必须提供条件")
    self.cursor.execute(f"DELETE FROM {table_name} WHERE {condition}", args)
    if self._is_locked:
        pass  # inside a transaction: commit deferred to the owner
    else:
        self.conn.commit()
def auto_migrate(self, *args: LiteModel):
    """Auto-migrate the tables backing the given models.

    Creates missing tables, adds columns the model declares but the table
    lacks, and drops columns the model no longer declares.

    Args:
        *args: model instances (with default values); nested models are
            not migrated.

    Raises:
        ValueError: if a model does not declare ``TABLE_NAME``.
    """
    for model in args:
        if not model.TABLE_NAME:
            raise ValueError(f"数据模型{type(model).__name__}未提供表名")
        # Create the table if it does not exist yet.
        self.cursor.execute(
            f'CREATE TABLE IF NOT EXISTS "{model.TABLE_NAME}" (id INTEGER PRIMARY KEY AUTOINCREMENT)'
        )
        # Desired structure: (prefixed) field name -> SQLite type.
        new_structure = {}
        for n_field, n_value in model.dump(by_alias=True).items():
            if n_field not in ["TABLE_NAME", "id"]:
                new_structure[self._get_stored_field_prefix(n_value) + n_field] = self._get_stored_type(n_value)
        # Existing structure: PRAGMA table_info gives (cid, name, type, ...).
        existing_structure = dict([(column[1], column[2]) for column in self.cursor.execute(f'PRAGMA table_info({model.TABLE_NAME})').fetchall()])
        # Add missing columns; SQLite is dynamically typed, so no type check.
        for n_field, n_type in new_structure.items():
            if n_field not in existing_structure.keys() and n_field.lower() not in ["id", "table_name"]:
                # Fix: the original looked the default up twice
                # (`DEFAULT_MAPPING.get(n_type, default_value)` where
                # default_value was already that lookup) — once is enough.
                default_value = self.DEFAULT_MAPPING.get(n_type, 'NULL')
                self.cursor.execute(
                    f"ALTER TABLE '{model.TABLE_NAME}' ADD COLUMN {n_field} {n_type} DEFAULT {default_value}"
                )
        # Drop columns the model no longer declares (never drop id).
        for e_field in existing_structure.keys():
            if e_field not in new_structure.keys() and e_field.lower() not in ['id']:
                self.cursor.execute(
                    f'ALTER TABLE "{model.TABLE_NAME}" DROP COLUMN "{e_field}"'
                )
        self.conn.commit()
def _get_stored_field_prefix(self, value) -> str:
    """Return the column-name prefix used when storing *value*.

    A model (or a dict carrying "TABLE_NAME") is stored as a foreign key;
    other containers are stored as pickled bytes; scalars get no prefix.

    Args:
        value: the value about to be stored.
    Returns:
        The SQLite column-name prefix ("" for plain scalars).
    """
    is_model_like = isinstance(value, LiteModel) or (
        isinstance(value, dict) and "TABLE_NAME" in value
    )
    if is_model_like:
        return self.FOREIGN_KEY_PREFIX
    if type(value) in self.ITERABLE_TYPE:
        return self.BYTES_PREFIX
    return ""
def _get_stored_type(self, value) -> str:
    """Return the SQLite column type used to store *value*.

    Args:
        value: the value about to be stored.
    Returns:
        An SQLite type name; "INTEGER" for model dicts (foreign-key row id),
        otherwise the TYPE_MAPPING entry with a "TEXT" fallback.
    """
    model_like = isinstance(value, dict) and "TABLE_NAME" in value
    return "INTEGER" if model_like else self.TYPE_MAPPING.get(type(value), "TEXT")
def _get_foreign_data(self, foreign_value: str) -> dict:
    """Resolve a foreign-key reference string into its row data.

    Args:
        foreign_value: reference of the form "FOREIGN_KEY_<rowid>@<table>".
    Returns:
        The referenced row as a field -> value dict.
    """
    foreign_value = foreign_value.replace(self.FOREIGN_KEY_PREFIX, "")
    table_name = foreign_value.split("@")[-1]
    foreign_id = foreign_value.split("@")[0]
    # PRAGMA table_info rows are (cid, name, type, ...): index 1 is the column name.
    fields = [description[1] for description in self.cursor.execute(f"PRAGMA table_info({table_name})").fetchall()]
    result = self.cursor.execute(f"SELECT * FROM {table_name} WHERE id = ?", (foreign_id,)).fetchone()
    # NOTE(review): if the referenced row was deleted, fetchone() returns None
    # and zip() below raises TypeError — confirm dangling references cannot occur.
    return dict(zip(fields, result))
def on_save(self, func: Callable[[LiteModel | Any], None]):
    """Decorator: register *func* to run whenever a model is saved.

    The wrapper forwards only models matching the type annotation of the
    decorated function's ``model`` parameter; functions without a ``model``
    parameter are never invoked.

    Args:
        func: callback taking a single ``model`` argument.
    Returns:
        The registered wrapper.
    """
    def wrapper(model):
        # Dispatch only when the model matches func's declared parameter type.
        sign = inspect.signature(func)
        if param := sign.parameters.get("model"):
            if isinstance(model, param.annotation):
                pass
            else:
                return
        else:
            return
        result = func(model)
        # NOTE(review): wrapper itself is appended to _on_save_callbacks below,
        # so this loop re-enters wrapper with *result*; it only terminates when
        # the result fails the annotation check above. Confirm this fan-out is
        # intended and cannot recurse indefinitely.
        for callback in self._on_save_callbacks:
            callback(result)
        return result
    self._on_save_callbacks.append(wrapper)
    return wrapper
# Python type -> SQLite column type, used by auto_migrate/_get_stored_type.
TYPE_MAPPING = {
    int : "INTEGER",
    float : "REAL",
    str : "TEXT",
    bool : "INTEGER",
    bytes : "BLOB",
    NoneType : "NULL",
    # dict : "TEXT",
    # list : "TEXT",
    # tuple : "TEXT",
    # set : "TEXT",
    dict : "BLOB",  # LITEYUKIDICT{key_name}
    list : "BLOB",  # LITEYUKILIST{key_name}
    tuple : "BLOB",  # LITEYUKITUPLE{key_name}
    set : "BLOB",  # LITEYUKISET{key_name}
    LiteModel: "TEXT"  # FOREIGN_KEY_{table_name}
}
# SQLite type -> column DEFAULT used when auto_migrate adds a column.
DEFAULT_MAPPING = {
    "TEXT" : "''",
    "INTEGER": 0,
    "REAL" : 0.0,
    "BLOB" : None,
    "NULL" : None
}

# Scalar types stored directly in a column.
BASIC_TYPE = (int, float, str, bool, bytes, NoneType)
# Container/model types that are serialized (pickled or stored as foreign keys).
ITERABLE_TYPE = (dict, list, tuple, set, LiteModel)

# Prefix marking a foreign-key reference column.
FOREIGN_KEY_PREFIX = "FOREIGN_KEY_"
# Prefix marking a pickled-bytes column.
BYTES_PREFIX = "PICKLE_BYTES_"
# transaction (tx) operations — chainable query API, all stubs for now.
def first(self, model: LiteModel) -> "Database":
    # TODO: not implemented; reserved for the chainable query API.
    pass

def where(self, condition: str, *args) -> "Database":
    # TODO: not implemented; reserved for the chainable query API.
    pass

def limit(self, limit: int) -> "Database":
    # TODO: not implemented; reserved for the chainable query API.
    pass

def order(self, order: str) -> "Database":
    # TODO: not implemented; reserved for the chainable query API.
    pass
def check_sqlite_keyword(name):
    """Check whether *name* collides with an SQLite reserved word.

    The strict check is deliberately disabled: the function always returns
    True so legacy tables/columns named after keywords keep working.
    Re-raise the commented ValueError to enforce the rule.

    Fix: the keyword collection is now a set (O(1) membership) instead of a
    list rebuilt and then ignored on every call.

    Args:
        name: identifier (table/column name) to check.
    Returns:
        True (always, while the check is disabled).
    """
    sqlite_keywords = {
        "ABORT", "ACTION", "ADD", "AFTER", "ALL", "ALTER", "ANALYZE", "AND", "AS", "ASC",
        "ATTACH", "AUTOINCREMENT", "BEFORE", "BEGIN", "BETWEEN", "BY", "CASCADE", "CASE",
        "CAST", "CHECK", "COLLATE", "COLUMN", "COMMIT", "CONFLICT", "CONSTRAINT", "CREATE",
        "CROSS", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DATABASE", "DEFAULT",
        "DEFERRABLE", "DEFERRED", "DELETE", "DESC", "DETACH", "DISTINCT", "DROP", "EACH",
        "ELSE", "END", "ESCAPE", "EXCEPT", "EXCLUSIVE", "EXISTS", "EXPLAIN", "FAIL", "FOR",
        "FOREIGN", "FROM", "FULL", "GLOB", "GROUP", "HAVING", "IF", "IGNORE", "IMMEDIATE",
        "IN", "INDEX", "INDEXED", "INITIALLY", "INNER", "INSERT", "INSTEAD", "INTERSECT",
        "INTO", "IS", "ISNULL", "JOIN", "KEY", "LEFT", "LIKE", "LIMIT", "MATCH", "NATURAL",
        "NO", "NOT", "NOTNULL", "NULL", "OF", "OFFSET", "ON", "OR", "ORDER", "OUTER", "PLAN",
        "PRAGMA", "PRIMARY", "QUERY", "RAISE", "RECURSIVE", "REFERENCES", "REGEXP", "REINDEX",
        "RELEASE", "RENAME", "REPLACE", "RESTRICT", "RIGHT", "ROLLBACK", "ROW", "SAVEPOINT",
        "SELECT", "SET", "TABLE", "TEMP", "TEMPORARY", "THEN", "TO", "TRANSACTION", "TRIGGER",
        "UNION", "UNIQUE", "UPDATE", "USING", "VACUUM", "VALUES", "VIEW", "VIRTUAL", "WHEN",
        "WHERE", "WITH", "WITHOUT",
    }
    if name.upper() in sqlite_keywords:
        # Check disabled on purpose — previously:
        # raise ValueError(f"'{name}' 是SQLite保留字不建议使用请更换名称")
        pass
    return True

View File

@ -1,99 +1,99 @@
import os import os
from pydantic import Field from pydantic import Field
from .data import Database, LiteModel from .data import Database, LiteModel
# Root directory for liteyuki's persistent database files.
DATA_PATH = "data/liteyuki"

# One Database per concern; the files live under DATA_PATH.
user_db: Database = Database(os.path.join(DATA_PATH, "users.ldb"))
group_db: Database = Database(os.path.join(DATA_PATH, "groups.ldb"))
plugin_db: Database = Database(os.path.join(DATA_PATH, "plugins.ldb"))
common_db: Database = Database(os.path.join(DATA_PATH, "common.ldb"))

# In-process key-value store for transient data (redis-like).
memory_database = {
}
class User(LiteModel):
    """Per-user persisted record."""
    TABLE_NAME: str = "user"
    user_id: str = Field(str(), alias="user_id")  # platform user id
    username: str = Field(str(), alias="username")
    profile: dict[str, str] = Field(dict(), alias="profile")  # free-form settings, e.g. the "lang" key
    enabled_plugins: list[str] = Field(list(), alias="enabled_plugins")
    disabled_plugins: list[str] = Field(list(), alias="disabled_plugins")
class Group(LiteModel):
    """Per-group-chat persisted record."""
    TABLE_NAME: str = "group_chat"
    # "group" is a reserved keyword, hence the table name "group_chat".
    group_id: str = Field(str(), alias="group_id")
    group_name: str = Field(str(), alias="group_name")
    enabled_plugins: list[str] = Field([], alias="enabled_plugins")
    disabled_plugins: list[str] = Field([], alias="disabled_plugins")
    enable: bool = Field(True, alias="enable")  # whether the bot is enabled in this group at all
    config: dict = Field({}, alias="config")
class InstalledPlugin(LiteModel):
    """A plugin installed locally, tracked with its version."""
    TABLE_NAME: str = "installed_plugin"
    module_name: str = Field(str(), alias="module_name")
    version: str = Field(str(), alias="version")
class GlobalPlugin(LiteModel):
    """Global (bot-wide) enable/disable state of a plugin."""
    TABLE_NAME: str = "global_plugin"
    liteyuki: bool = Field(True, alias="liteyuki")  # True if it is a Liteyuki plugin
    module_name: str = Field(str(), alias="module_name")
    enabled: bool = Field(True, alias="enabled")
class StoredConfig(LiteModel):
    """Persisted key-value configuration blob."""
    TABLE_NAME: str = "stored_config"
    config: dict = {}
class TempConfig(LiteModel):
    """Table storing temporary key-value pairs."""
    TABLE_NAME: str = "temp_data"
    data: dict = {}
def auto_migrate():
    """Create/upgrade the tables backing every built-in model."""
    migrations = (
        (user_db, (User(),)),
        (group_db, (Group(),)),
        (plugin_db, (InstalledPlugin(), GlobalPlugin())),
        (common_db, (GlobalPlugin(), TempConfig())),
    )
    for database, models in migrations:
        database.auto_migrate(*models)


# Run migrations at import time so the tables exist before first use.
auto_migrate()
def set_memory_data(key: str, value) -> None:
    """Store a value in the in-process memory database (redis-like).

    Args:
        key: lookup key.
        value: value to store under *key*.

    Returns:
        None.
    """
    memory_database[key] = value
def get_memory_data(key: str, default=None):
    """Fetch a value from the in-process memory database (redis-like).

    Fix: the original annotated the return as ``-> any`` — that is the
    builtin function ``any``, not ``typing.Any`` — so the misleading
    annotation is dropped.

    Args:
        key: lookup key.
        default: value returned when *key* is absent.

    Returns:
        The stored value, or *default*.
    """
    return memory_database.get(key, default)

View File

@ -1,237 +1,237 @@
""" """
语言模块,添加对多语言的支持 语言模块,添加对多语言的支持
""" """
import json import json
import locale import locale
import os import os
from typing import Any, overload from typing import Any, overload
import nonebot import nonebot
from .config import config, get_config from .config import config, get_config
from .data_manager import User, user_db from .data_manager import User, user_db
# lang_code -> {text key: text}; preloaded with a minimal English stub.
_language_data = {
    "en": {
        "name": "English",
    }
}

# user_id -> lang_code cache, filled lazily by get_user_lang.
# NOTE(review): the literal "user_id" entry looks like a leftover placeholder
# rather than real data — confirm it should not ship.
_user_lang = {"user_id": "zh-CN"}
def load_from_lang(file_path: str, lang_code: str = None):
    """Load simple ``key=value`` language data from a .lang file.

    Args:
        file_path: path to the .lang file.
        lang_code: language code; derived from the file name when None.
    """
    try:
        if lang_code is None:
            lang_code = os.path.basename(file_path).split(".")[0]
        entries = {}
        with open(file_path, "r", encoding="utf-8") as fp:
            for raw_line in fp:
                stripped = raw_line.strip()
                if not stripped or stripped.startswith("#"):
                    continue  # skip blank lines and comments
                key, value = stripped.split("=", 1)
                entries[key.strip()] = value.strip()
        _language_data.setdefault(lang_code, {}).update(entries)
        nonebot.logger.debug(f"Loaded language data from {file_path}")
    except Exception as e:
        nonebot.logger.error(f"Failed to load language data from {file_path}: {e}")
def load_from_json(file_path: str, lang_code: str = None):
    """Load language data from a JSON file (values may contain variables).

    Args:
        file_path: path to the JSON file.
        lang_code: language code; derived from the file name when None.
    """
    try:
        if lang_code is None:
            lang_code = os.path.basename(file_path).split(".")[0]
        with open(file_path, "r", encoding="utf-8") as fp:
            payload = json.load(fp)
        _language_data.setdefault(lang_code, {}).update(payload)
        nonebot.logger.debug(f"Loaded language data from {file_path}")
    except Exception as e:
        nonebot.logger.error(f"Failed to load language data from {file_path}: {e}")
def load_from_dir(dir_path: str):
    """Load every .lang / .json file found directly inside *dir_path*.

    Args:
        dir_path: directory to scan (non-recursive).
    """
    loaders = {".lang": load_from_lang, ".json": load_from_json}
    for file in os.listdir(dir_path):
        try:
            full_path = os.path.join(dir_path, file)
            if not os.path.isfile(full_path):
                continue
            loader = loaders.get(os.path.splitext(file)[1])
            if loader is not None:
                loader(full_path)
        except Exception as e:
            # One bad file must not stop the rest of the directory.
            nonebot.logger.error(f"Failed to load language data from {file}: {e}")
            continue
def load_from_dict(data: dict, lang_code: str):
    """Merge *data* into the in-memory table for *lang_code*.

    Args:
        data: text key -> text mapping.
        lang_code: language code to merge into.
    """
    _language_data.setdefault(lang_code, {}).update(data)
class Language:
    """Localized-text resolver with a triple fallback chain:
    user language -> default/system language -> zh-CN.
    """

    def __init__(self, lang_code: str = None, fallback_lang_code: str = None):
        # Active language; falls back to the configured/system default.
        self.lang_code = lang_code
        if self.lang_code is None:
            self.lang_code = get_default_lang_code()
        # Secondary fallback tried before the hard-coded zh-CN.
        self.fallback_lang_code = fallback_lang_code
        if self.fallback_lang_code is None:
            self.fallback_lang_code = config.get(
                "default_language", get_system_lang_code()
            )

    def _get(self, item: str, *args, **kwargs) -> str | Any:
        """Resolve *item* through the fallback chain and format it.

        The ``default`` kwarg is the text returned when no language
        defines the key.

        **Do not override this method.**

        Args:
            item: text key
            *args: format args
            **kwargs: format kwargs
        Returns:
            str: localized text (or ``default``, or the key itself).
        """
        default = kwargs.pop("default", None)
        fallback = (self.lang_code, self.fallback_lang_code, "zh-CN")
        for lang_code in fallback:
            if lang_code in _language_data and item in _language_data[lang_code]:
                trans: str = _language_data[lang_code][item]
                try:
                    return trans.format(*args, **kwargs)
                except Exception as e:
                    # Bad placeholders: log and return the raw template.
                    nonebot.logger.warning(f"Failed to format language data: {e}")
                    return trans
        return default or item

    def get(self, item: str, *args, **kwargs) -> str | Any:
        """Get localized text; the ``default`` kwarg is the fallback text.

        Args:
            item: text key
            *args: format args
            **kwargs: format kwargs
        Returns:
            str: localized text
        """
        return self._get(item, *args, **kwargs)

    def get_many(self, *args: str, **kwargs) -> dict[str, str]:
        """Get several localized texts at once.

        Args:
            *args: text keys
            **kwargs: text key -> default text
        Returns:
            dict: key -> localized text
        """
        args_data = {item: self.get(item) for item in args}
        kwargs_data = {
            item: self.get(item, default=default) for item, default in kwargs.items()
        }
        args_data.update(kwargs_data)
        return args_data
def change_user_lang(user_id: str, lang_code: str):
    """Change a user's language, persisting it to both the database and
    the in-memory cache.
    """
    record = user_db.where_one(
        User(), "user_id = ?", user_id, default=User(user_id=user_id)
    )
    record.profile["lang"] = lang_code
    user_db.save(record)
    _user_lang[user_id] = lang_code
def get_user_lang(user_id: str) -> Language:
    """Return the user's Language instance, preferring the in-memory cache.

    On a cache miss the language is read from the user DB (falling back to
    the default language code) and cached for later calls.
    """
    user_id = str(user_id)
    if user_id not in _user_lang:
        nonebot.logger.debug(f"Loading user language for {user_id}")
        user = user_db.where_one(
            User(),
            "user_id = ?",
            user_id,
            default=User(user_id=user_id, username="Unknown"),
        )
        lang_code = user.profile.get("lang", get_default_lang_code())
        _user_lang[user_id] = lang_code
    return Language(_user_lang[user_id])
def get_system_lang_code() -> str:
    """Return the OS locale as an IETF-style code (e.g. ``zh-CN``).

    Fix: ``locale.getdefaultlocale()`` may return ``(None, None)`` (and is
    deprecated since Python 3.11); the original then crashed with
    ``AttributeError`` on ``None.replace``. Fall back to ``"zh-CN"`` —
    this module's ultimate fallback language — instead.
    """
    lang_code = locale.getdefaultlocale()[0]
    return (lang_code or "zh-CN").replace("_", "-")
def get_default_lang_code() -> str:
    """Return the configured default language code, falling back to the
    system locale when no configuration is set.

    Returns:
        A language code string.
    """
    system_default = get_system_lang_code()
    return get_config("default_language", default=system_default)
def get_all_lang() -> dict[str, str]:
    """Map every loaded language code to its display name.

    Returns:
        e.g. ``{'en': 'English'}``; the code itself is used when the
        ``language.name`` key is missing.
    """
    return {
        code: table.get("language.name", code)
        for code, table in _language_data.items()
    }

View File

@ -1,79 +1,79 @@
import sys import sys
import loguru import loguru
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from .config import load_from_yaml from .config import load_from_yaml
from .language import Language, get_default_lang_code from .language import Language, get_default_lang_code
logger = loguru.logger logger = loguru.logger
if TYPE_CHECKING: if TYPE_CHECKING:
# avoid sphinx autodoc resolve annotation failed # avoid sphinx autodoc resolve annotation failed
# because loguru module do not have `Logger` class actually # because loguru module do not have `Logger` class actually
from loguru import Record from loguru import Record
def default_filter(record: "Record"):
    """Default log filter; the threshold follows the configured log level."""
    log_level = record["extra"].get("nonebot_log_level", "INFO")
    if isinstance(log_level, str):
        levelno = logger.level(log_level).no
    else:
        levelno = log_level
    return record["level"].no >= levelno
# Log format used when the configured level is DEBUG
# (verbose: full timestamp plus module/function/line location).
debug_format: str = (
    "<c>{time:YYYY-MM-DD HH:mm:ss}</c> "
    "<lvl>[{level.icon}]</lvl> "
    "<c><{name}.{module}.{function}:{line}></c> "
    "{message}"
)
# Default (non-debug) log format: shorter timestamp, logger name only.
default_format: str = (
    "<c>{time:MM-DD HH:mm:ss}</c> "
    "<lvl>[{level.icon}]</lvl> "
    "<c><{name}></c> "
    "{message}"
)
def get_format(level: str) -> str:
    """Pick the log format string for the given level name."""
    return debug_format if level == "DEBUG" else default_format
logger = loguru.logger.bind() logger = loguru.logger.bind()
def init_log():
    """Initialise the global loguru logger from ``config.yml``.

    Must run after language data is loaded so level names can be localised.
    Removes loguru's default sink, installs a stdout sink whose format
    follows the configured ``log_level``, then re-registers the level
    names/icons with localised labels.
    """
    global logger

    config = load_from_yaml("config.yml")
    logger.remove()  # drop loguru's default handler before adding our own
    logger.add(
        sys.stdout,
        level=0,  # pass everything; default_filter applies the real threshold
        diagnose=False,
        filter=default_filter,
        format=get_format(config.get("log_level", "INFO")),
    )
    show_icon = config.get("log_icon", True)
    lang = Language(get_default_lang_code())
    # Localised level labels, '='-padded for column alignment by default.
    debug = lang.get("log.debug", default="==DEBUG")
    info = lang.get("log.info", default="===INFO")
    success = lang.get("log.success", default="SUCCESS")
    warning = lang.get("log.warning", default="WARNING")
    error = lang.get("log.error", default="==ERROR")
    # NOTE(review): some icon glyphs below appear to have been lost in
    # transit (empty string literals) — verify against the original file.
    logger.level("DEBUG", color="<blue>", icon=f"{'🐛' if show_icon else ''}{debug}")
    logger.level("INFO", color="<normal>", icon=f"{'' if show_icon else ''}{info}")
    logger.level("SUCCESS", color="<green>", icon=f"{'' if show_icon else ''}{success}")
    logger.level("WARNING", color="<yellow>", icon=f"{'⚠️' if show_icon else ''}{warning}")
    logger.level("ERROR", color="<red>", icon=f"{'' if show_icon else ''}{error}")

View File

@ -1,197 +1,197 @@
""" """
liteyuki function是一种类似于mcfunction的函数用于在liteyuki中实现一些功能例如自定义指令等也可与Python函数绑定 liteyuki function是一种类似于mcfunction的函数用于在liteyuki中实现一些功能例如自定义指令等也可与Python函数绑定
使用 /function function_name *args **kwargs来调用 使用 /function function_name *args **kwargs来调用
例如 /function test/hello user_id=123456 例如 /function test/hello user_id=123456
可以用于一些轻量级插件的编写无需Python代码 可以用于一些轻量级插件的编写无需Python代码
SnowyKami SnowyKami
""" """
import asyncio import asyncio
import functools import functools
# cmd *args **kwargs # cmd *args **kwargs
# api api_name **kwargs # api api_name **kwargs
import os import os
from typing import Any, Awaitable, Callable, Coroutine from typing import Any, Awaitable, Callable, Coroutine
import nonebot import nonebot
from nonebot import Bot from nonebot import Bot
from nonebot.adapters.satori import bot from nonebot.adapters.satori import bot
from nonebot.internal.matcher import Matcher from nonebot.internal.matcher import Matcher
# File extensions recognised as liteyuki function scripts.
ly_function_extensions = (
    "lyf",
    "lyfunction",
    "mcfunction"
)
# Registry of loaded functions, keyed by function name.
loaded_functions = dict()
class LiteyukiFunction:
    """A parsed liteyuki function: a named list of command lines.

    Instances are built by ``load_from_file`` and invoked with
    ``await func(*args, **kwargs)``; each command line is interpreted by
    ``execute_line`` (heads: var / cmd / api / function / sleep / nohup /
    end / await).
    """

    def __init__(self, name: str):
        self.name = name
        # Raw command lines, executed in order.
        self.functions: list[str] = list()
        # bot/matcher are injected by the caller before invocation.
        self.bot: Bot = None
        self.kwargs_data = dict()  # variables / keyword args for substitution
        self.args_data = list()  # positional args accumulated across calls
        self.matcher: Matcher = None
        self.end = False  # set when the "end" command is reached
        self.sub_tasks: list[asyncio.Task] = list()  # tasks spawned by "nohup"

    async def __call__(self, *args, **kwargs):
        """Execute every command line; stop and cancel subtasks on "end"."""
        self.kwargs_data.update(kwargs)
        # NOTE(review): set() deduplicates but loses argument order — confirm
        # positional formatting does not depend on ordering.
        self.args_data = list(set(self.args_data + list(args)))
        for i, cmd in enumerate(self.functions):
            r = await self.execute_line(cmd, i, *args, **kwargs)
            if r == 0:  # "end" command signals termination with 0
                msg = f"End function {self.name} by line {i}"
                nonebot.logger.debug(msg)
                for task in self.sub_tasks:
                    task.cancel(msg)
                return

    def __str__(self):
        return f"LiteyukiFunction({self.name})"

    def __repr__(self):
        return self.__str__()

    async def execute_line(self, cmd: str, line: int = 0, *args, **kwargs) -> Any:
        """Interpret a single command line.

        Args:
            cmd: the command text
            line: line number, used only in error messages

        Returns:
            0 when the "end" command is hit, otherwise ``None``.
        """
        try:
            if "${" in cmd:
                # Only substitute ${key} placeholders here; plain {..} is
                # left untouched in this branch.
                for i in range(len(cmd) - 1):
                    if cmd[i] == "$" and cmd[i + 1] == "{":
                        end = cmd.find("}", i)
                        key = cmd[i + 2:end]
                        cmd = cmd.replace(f"${{{key}}}", str(self.kwargs_data.get(key, "")))
            else:
                cmd = cmd.format(*self.args_data, **self.kwargs_data)
        except Exception as e:
            # Substitution is best-effort: a failed format keeps the raw line.
            pass
        # Payload: everything after the first space, if any.
        no_head = cmd.split(" ", 1)[1] if len(cmd.split(" ")) > 1 else ""
        try:
            head, cmd_args, cmd_kwargs = self.get_args(cmd)
        except Exception as e:
            error_msg = f"Parsing error in {self.name} at line {line}: {e}"
            nonebot.logger.error(error_msg)
            await self.matcher.send(error_msg)
            return
        if head == "var":
            # Define or overwrite variables.
            self.kwargs_data.update(cmd_kwargs)
        elif head == "cmd":
            # Run a shell command on the host machine.
            os.system(no_head)
        elif head == "api":
            # Call a Bot API; requires self.bot to be set by the caller.
            await self.bot.call_api(cmd_args[1], **cmd_kwargs)
        elif head == "function":
            # Invoke another liteyuki function, propagating bot/matcher.
            func = get_function(cmd_args[1])
            func.bot = self.bot
            func.matcher = self.matcher
            await func(*cmd_args[2:], **cmd_kwargs)
        elif head == "sleep":
            # Pause for the given number of seconds.
            await asyncio.sleep(float(cmd_args[1]))
        elif head == "nohup":
            # Run the rest of the line concurrently as a background task.
            task = asyncio.create_task(self.execute_line(no_head))
            self.sub_tasks.append(task)
        elif head == "end":
            # Terminate the whole function (subtasks cancelled by __call__).
            self.end = True
            return 0
        elif head == "await":
            # Wait for every background task to finish.
            await asyncio.gather(*self.sub_tasks)

    def get_args(self, line: str) -> tuple[str, tuple[str, ...], dict[str, Any]]:
        """Split one command line into head, positional args and kwargs.

        ``key=value`` tokens become kwargs (``\\=`` escapes a literal ``=``);
        values are evaluated with ``eval`` when possible, otherwise looked
        up in the stored variables.  Note ``args`` includes the head token
        at index 0.

        Args:
            line: the command text

        Returns:
            (head, positional args, keyword args)
        """
        line = line.replace("\\=", "EQUAL_SIGN")
        head = ""
        args = list()
        kwargs = dict()
        for i, arg in enumerate(line.split(" ")):
            if "=" in arg:
                key, value = arg.split("=", 1)
                value = value.replace("EQUAL_SIGN", "=")
                try:
                    # SECURITY: eval runs arbitrary expressions from the
                    # function file — function files must be trusted input.
                    value = eval(value)
                except:
                    value = self.kwargs_data.get(value, value)
                kwargs[key] = value
            else:
                if i == 0:
                    head = arg
                args.append(arg)
        return head, tuple(args), kwargs
def get_function(name: str) -> LiteyukiFunction | None:
    """Look up a loaded liteyuki function by name.

    Args:
        name: function name (file name without extension)

    Returns:
        the registered function, or ``None`` when not loaded.
    """
    try:
        return loaded_functions[name]
    except KeyError:
        return None
def load_from_dir(path: str):
    """Recursively load every liteyuki function file under *path*.

    Files whose extension matches ``ly_function_extensions`` are loaded;
    sub-directories are traversed recursively (mcfunction-style layout).

    Args:
        path: directory to scan
    """
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if os.path.isfile(full) and full.endswith(ly_function_extensions):
            load_from_file(full)
        elif os.path.isdir(full):
            load_from_dir(full)
def load_from_file(path: str):
    """Parse a liteyuki function file and register it in ``loaded_functions``.

    Blank lines and ``#`` comment lines are skipped; every remaining line
    becomes one command.  The function name is the file name without its
    extension.

    Args:
        path: path of the function file
    """
    name = ".".join(os.path.basename(path).split(".")[:-1])
    func = LiteyukiFunction(name)
    with open(path, "r", encoding="utf-8") as f:
        for raw_line in f.read().split("\n"):
            if raw_line.strip() == "" or raw_line.startswith("#"):
                continue
            func.functions.append(raw_line)
    loaded_functions[name] = func
    nonebot.logger.debug(f"Loaded function {name}")

View File

@ -1,8 +1,8 @@
from nonebot.adapters.onebot import v11, v12
from nonebot.adapters import satori

# Union aliases spanning the supported adapters (OneBot v11/v12, Satori).
T_Bot = v11.Bot | v12.Bot | satori.Bot
# NOTE(review): the group/private unions omit Satori event types — confirm
# Satori events are handled elsewhere (see get_info helpers).
T_GroupMessageEvent = v11.GroupMessageEvent | v12.GroupMessageEvent
T_PrivateMessageEvent = v11.PrivateMessageEvent | v12.PrivateMessageEvent
T_MessageEvent = v11.MessageEvent | v12.MessageEvent | satori.MessageEvent
T_Message = v11.Message | v12.Message | satori.Message

View File

@ -1,5 +1,5 @@
from nonebot.adapters.onebot import v11

# Re-exported OneBot v11 permission checkers for group admins and owners.
GROUP_ADMIN = v11.GROUP_ADMIN
GROUP_OWNER = v11.GROUP_OWNER

View File

@ -1,355 +1,355 @@
import json import json
import os import os
import shutil import shutil
import zipfile import zipfile
from typing import Any from typing import Any
from pathlib import Path from pathlib import Path
import aiofiles import aiofiles
import nonebot import nonebot
import yaml import yaml
from .data import LiteModel from .data import LiteModel
from .language import Language, get_default_lang_code from .language import Language, get_default_lang_code
from .ly_function import loaded_functions from .ly_function import loaded_functions
# Loaded resource packs, ordered by priority (highest priority first).
_loaded_resource_packs: list["ResourceMetadata"] = []

temp_resource_root = Path("data/liteyuki/resources")  # merged runtime resource tree
temp_extract_root = Path("data/liteyuki/temp")  # extraction area for zip packs
lang = Language(get_default_lang_code())
class ResourceMetadata(LiteModel):
    """Metadata of a resource pack, parsed from its ``metadata.yml``."""

    name: str = "Unknown"
    version: str = "0.0.1"
    description: str = "Unknown"
    path: str = ""  # source path the pack was loaded from
    folder: str = ""  # folder (or zip) base name inside resources/
def load_resource_from_dir(path: str):
    """Load one resource pack (directory or zip) into the runtime tree.

    Copies every file of the pack, preserving relative paths, into the
    temporary folder ``data/liteyuki/resources`` and registers its
    metadata.  Anything without a ``metadata.yml`` is ignored.  The pack's
    ``lang``, ``functions`` and ``word_bank`` sub-folders are handed to the
    corresponding loader modules.

    Args:
        path: resource pack directory, or a ``.zip`` archive
    """
    if os.path.exists(os.path.join(path, "metadata.yml")):
        with open(os.path.join(path, "metadata.yml"), "r", encoding="utf-8") as f:
            metadata = yaml.safe_load(f)
    elif os.path.isfile(path) and path.endswith(".zip"):
        # Zip pack: extract to data/liteyuki/temp/{pack_name}.zip and read
        # metadata.yml straight from the archive.
        with zipfile.ZipFile(path, "r") as zip_ref:
            zip_ref.extractall(os.path.join(temp_extract_root, os.path.basename(path)))
            with zip_ref.open("metadata.yml") as f:
                metadata = yaml.safe_load(f)
        # From here on, treat the extracted folder as the pack path.
        path = os.path.join(temp_extract_root, os.path.basename(path))
    else:
        # No metadata.yml: not a resource pack.
        return
    for root, dirs, files in os.walk(path):
        for file in files:
            relative_path = os.path.relpath(os.path.join(root, file), path)
            copy_file(
                os.path.join(root, file),
                os.path.join(temp_resource_root, relative_path),
            )
    metadata["path"] = path
    metadata["folder"] = os.path.basename(path)
    if os.path.exists(os.path.join(path, "lang")):
        # Load language files shipped with the pack.
        from src.utils.base.language import load_from_dir

        load_from_dir(os.path.join(path, "lang"))
    if os.path.exists(os.path.join(path, "functions")):
        # Load liteyuki functions shipped with the pack.
        from src.utils.base.ly_function import load_from_dir

        load_from_dir(os.path.join(path, "functions"))
    if os.path.exists(os.path.join(path, "word_bank")):
        # Load word-bank files shipped with the pack.
        from src.utils.base.word_bank import load_from_dir

        load_from_dir(os.path.join(path, "word_bank"))
    # Newly loaded packs take the highest priority (front of the list).
    _loaded_resource_packs.insert(0, ResourceMetadata(**metadata))
def get_path(
    path: os.PathLike[str,] | Path | str,
    abs_path: bool = True,
    default: Any = None,
    debug: bool = False,
) -> str | Any:
    """Resolve *path* inside the merged resource tree; the path must exist.

    Args:
        path: path relative to the resource root
        abs_path: return an absolute path when True
        default: value returned when the path does not exist
        debug: reload all resources before resolving (development aid)

    Returns: the resolved path as ``str``, or *default* when missing.
    """
    if debug:
        nonebot.logger.debug("Resource path debug enabled, reloading")
        load_resources()
    candidate = temp_resource_root / path
    if not candidate.exists():
        return default
    return str(candidate.resolve()) if abs_path else str(candidate)
def get_resource_path(
    path: os.PathLike[str,] | Path | str,
    abs_path: bool = True,
    only_exist: bool = False,
    default: Any = None,
    debug: bool = False,
) -> Path:
    """Resolve *path* inside the merged resource tree as a ``Path``.

    Args:
        path: path relative to the resource root
        abs_path: return an absolute (resolved) path when True
        only_exist: when True, require the path to exist
        default: [used only with ``only_exist``] value for missing paths
        debug: reload all resources before resolving (development aid)

    Returns: the requested path (or *default* when required but missing).
    """
    if debug:
        nonebot.logger.debug("Resource path debug enabled, reloading")
        load_resources()
    candidate = temp_resource_root / path
    if abs_path:
        candidate = candidate.resolve()
    if only_exist and not candidate.exists():
        return default
    return candidate
def get_files(
    path: os.PathLike[str,] | Path | str, abs_path: bool = False
) -> list[str]:
    """List the contents of a resource directory as path strings.

    Args:
        path: directory path relative to the resource root
        abs_path: return absolute paths when True

    Returns: entry paths as strings; empty list when the directory is absent.
    """
    base = temp_resource_root / path
    if not base.exists():
        return []
    entries: list[str] = []
    for name in os.listdir(base):
        entry = base / name
        entries.append(str(entry.resolve()) if abs_path else str(entry))
    return entries
def get_resource_files(
    path: os.PathLike[str,] | Path | str, abs_path: bool = False
) -> list[Path]:
    """List the contents of a resource directory as ``Path`` objects.

    Args:
        path: directory path relative to the resource root
        abs_path: return absolute (resolved) paths when True

    Returns: entry paths; empty list when the directory is absent.
    """
    base = temp_resource_root / path
    if not base.exists():
        return []
    entries: list[Path] = []
    for name in os.listdir(base):
        entry = base / name
        entries.append(entry.resolve() if abs_path else entry)
    return entries
def get_loaded_resource_packs() -> list[ResourceMetadata]:
    """Return the loaded resource packs, highest priority first.

    Returns: the live list of pack metadata (not a copy).
    """
    return _loaded_resource_packs
def copy_file(src, dst):
    """Copy *src* to *dst*, creating the destination directory if needed.

    Args:
        src: source file path
        dst: destination file path; its parent directories are created

    Fixes two defects of the original: the exists()/makedirs() pair was a
    TOCTOU race, and ``os.makedirs("")`` raised FileNotFoundError when
    *dst* had no directory component.
    """
    dst_dir = os.path.dirname(dst)
    if dst_dir:
        # exist_ok avoids the race between the existence check and creation.
        os.makedirs(dst_dir, exist_ok=True)
    shutil.copy(src, dst)
def load_resources():
    """Reload every resource pack (entry point for the main program).

    Clears the merged resource tree and the function registry, copies the
    built-in packs from ``src/resources``, then loads the user packs listed
    in ``resources/index.json``.  Higher-priority packs are loaded last so
    they end up at the front of the loaded list.

    Fix: the original passed bare ``open()`` handles to ``json.dump`` /
    ``json.load`` and never closed them; context managers close them
    deterministically.
    """
    _loaded_resource_packs.clear()
    loaded_functions.clear()
    # Rebuild the temporary merged resource tree from scratch.
    if os.path.exists(temp_resource_root):
        shutil.rmtree(temp_resource_root)
    os.makedirs(temp_resource_root, exist_ok=True)

    # Built-in resources shipped with the source tree.
    standard_resources_path = "src/resources"
    for resource_dir in os.listdir(standard_resources_path):
        load_resource_from_dir(os.path.join(standard_resources_path, resource_dir))

    # User resource packs, tracked by resources/index.json.
    if not os.path.exists("resources"):
        os.makedirs("resources", exist_ok=True)
    if not os.path.exists("resources/index.json"):
        with open("resources/index.json", "w", encoding="utf-8") as f:
            json.dump([], f)
    with open("resources/index.json", "r", encoding="utf-8") as f:
        resource_index: list[str] = json.load(f)
    resource_index.reverse()  # high priority loads last but sorts first
    for resource in resource_index:
        load_resource_from_dir(os.path.join("resources", resource))
def check_status(name: str) -> bool:
    """Whether the resource pack *name* (its folder name) is loaded.

    Args:
        name: resource pack folder name

    Returns: True when a loaded pack has that folder name.
    """
    return any(rp.folder == name for rp in get_loaded_resource_packs())
def check_exist(name: str) -> bool:
    """Whether a pack called *name* exists under the ``resources`` folder.

    A pack exists either as a directory containing ``metadata.yml`` or as
    a ``.zip`` archive file.

    Args:
        name: resource pack folder (or zip file) name

    Returns: True when the pack is present on disk.
    """
    pack_path = os.path.join("resources", name)
    if os.path.exists(os.path.join(pack_path, "metadata.yml")):
        return True
    return os.path.isfile(pack_path) and name.endswith(".zip")
def add_resource_pack(name: str) -> bool:
    """Register pack *name* in ``resources/index.json`` and load it.

    Only the index file is modified; run a full resource reload for the
    change to fully take effect.

    Args:
        name: resource pack folder (or zip) name

    Returns:
        True when the pack was added and loaded, False otherwise.

    Fix: the original leaked the ``open()`` handles passed to json; they
    are now closed via context managers.  Logic is otherwise unchanged.
    """
    if not check_exist(name):
        nonebot.logger.warning(lang.get("liteyuki.resource_not_exist", name=name))
        return False
    with open("resources/index.json", "r", encoding="utf-8") as f:
        old_index: list[str] = json.load(f)
    if name in old_index:
        nonebot.logger.warning(lang.get("liteyuki.resource_loaded", name=name))
        return False
    old_index.append(name)
    with open("resources/index.json", "w", encoding="utf-8") as f:
        json.dump(old_index, f)
    load_resource_from_dir(os.path.join("resources", name))
    return True
def remove_resource_pack(name: str) -> bool:
    """Unregister pack *name* from ``resources/index.json``.

    Only the load index is modified; reload resources for the change to
    take effect.

    Args:
        name: resource pack folder (or zip) name

    Returns:
        True when the pack was removed from the index, False otherwise.

    Fix: the original leaked the ``open()`` handles passed to json; they
    are now closed via context managers.  Logic is otherwise unchanged.
    """
    if not check_exist(name):
        nonebot.logger.warning(lang.get("liteyuki.resource_not_exist", name=name))
        return False
    with open("resources/index.json", "r", encoding="utf-8") as f:
        old_index: list[str] = json.load(f)
    if name not in old_index:
        nonebot.logger.warning(lang.get("liteyuki.resource_not_loaded", name=name))
        return False
    old_index.remove(name)
    with open("resources/index.json", "w", encoding="utf-8") as f:
        json.dump(old_index, f)
    return True
def change_priority(name: str, delta: int) -> bool:
    """Move pack *name* by *delta* positions in the load index.

    Args:
        name: resource pack folder name
        delta: positive moves the pack later in the index, negative earlier

    Returns:
        True when the index was updated, False when the pack is not listed
        or the move would leave the index bounds.

    Fix: the original leaked the ``open()`` handles passed to json; they
    are now closed via context managers.  Logic is otherwise unchanged.
    """
    with open("resources/index.json", "r", encoding="utf-8") as f:
        old_resource_list: list[str] = json.load(f)
    if name not in old_resource_list:
        nonebot.logger.debug("Priority change failed, resource not loaded")
        return False
    index = old_resource_list.index(name)
    new_index = index + delta
    if not (0 <= new_index < len(old_resource_list)):
        nonebot.logger.warning("Priority change failed, out of range")
        return False
    new_resource_list = old_resource_list.copy()
    new_resource_list.remove(name)
    new_resource_list.insert(new_index, name)
    with open("resources/index.json", "w", encoding="utf-8") as f:
        json.dump(new_resource_list, f)
    return True
def get_resource_metadata(name: str) -> ResourceMetadata:
    """Return the metadata of the loaded pack whose folder is *name*.

    Args:
        name: resource pack folder name

    Returns: the pack's metadata, or a default ``ResourceMetadata`` when
    no loaded pack matches.
    """
    return next(
        (rp for rp in get_loaded_resource_packs() if rp.folder == name),
        ResourceMetadata(),
    )

View File

@ -1,57 +1,57 @@
import json import json
import os import os
import random import random
from typing import Iterable from typing import Iterable
import nonebot import nonebot
# Keyword -> set of candidate replies, merged from every loaded bank file.
word_bank: dict[str, set[str]] = {}
def load_from_file(file_path: str):
    """Merge one JSON word-bank file into the global ``word_bank``.

    Args:
        file_path: path of a JSON file mapping keyword -> list of replies
    """
    with open(file_path, "r", encoding="utf-8") as file:
        data = json.load(file)
    for key, value_list in data.items():
        word_bank.setdefault(key, set()).update(value_list)
    nonebot.logger.debug(f"Loaded word bank from {file_path}")
def load_from_dir(dir_path: str):
    """Load every ``.json`` word-bank file directly inside *dir_path*.

    Files that fail to load are logged and skipped.

    Args:
        dir_path: directory to scan (non-recursive)
    """
    for entry in os.listdir(dir_path):
        try:
            entry_path = os.path.join(dir_path, entry)
            if os.path.isfile(entry_path) and entry.endswith(".json"):
                load_from_file(entry_path)
        except Exception as e:
            nonebot.logger.error(f"Failed to load language data from {entry}: {e}")
            continue
def get_reply(kws: Iterable[str]) -> str | None:
    """Return a random reply for the first keyword present in the bank.

    Args:
        kws: keywords to look up, tried in iteration order

    Returns: a reply string, or ``None`` when no keyword matches.
    """
    matched = next((kw for kw in kws if kw in word_bank), None)
    if matched is None:
        return None
    return random.choice(list(word_bank[matched]))

View File

@ -1 +1 @@
from .get_info import * from .get_info import *

View File

@ -1,26 +1,26 @@
from nonebot.adapters import satori from nonebot.adapters import satori
from nonebot.adapters import onebot from nonebot.adapters import onebot
from src.utils.base.ly_typing import T_MessageEvent, T_GroupMessageEvent from src.utils.base.ly_typing import T_MessageEvent, T_GroupMessageEvent
def get_user_id(event: T_MessageEvent):
    """Return the sender's user id across the supported adapter events."""
    if isinstance(event, satori.event.Event):
        return event.user.id
    return event.user_id
def get_group_id(event: T_GroupMessageEvent):
    """Return the group/guild id, or ``None`` for unsupported event types."""
    if isinstance(event, satori.event.Event):
        return event.guild.id
    if isinstance(event, onebot.v11.GroupMessageEvent):
        return event.group_id
    return None
def get_message_type(event: T_MessageEvent) -> str:
    """Return the message type, "private" or "group", for the given event."""
    if isinstance(event, satori.event.Event):
        # Satori carries no explicit type; infer it from guild presence.
        return "group" if event.guild is not None else "private"
    return event.message_type

View File

@ -1,40 +1,40 @@
async def get_user_icon(platform: str, user_id: str) -> str:
    """Build the avatar URL of a user on the given platform.

    Args:
        platform: platform name, e.g. "qq", "telegram", "discord"
        user_id: platform-specific user identifier

    Returns:
        str: avatar URL, or an empty string for unknown platforms.
    """
    templates = {
        "qq": "http://q1.qlogo.cn/g?b=qq&nk={uid}&s=640",
        "telegram": "https://t.me/i/userpic/320/{uid}.jpg",
        "discord": "https://cdn.discordapp.com/avatars/{uid}/",
    }
    pattern = templates.get(platform)
    return pattern.format(uid=user_id) if pattern else ""
async def get_group_icon(platform: str, group_id: str) -> str:
    """Build the group/guild icon URL for the given platform.

    Args:
        platform: platform name, e.g. "qq", "telegram", "discord"
        group_id: platform-specific group identifier

    Returns:
        str: icon URL, or an empty string for unknown platforms.
    """
    templates = {
        "qq": "http://p.qlogo.cn/gh/{gid}/{gid}/640",
        "telegram": "https://t.me/c/{gid}/",
        "discord": "https://cdn.discordapp.com/icons/{gid}/",
    }
    pattern = templates.get(platform)
    return pattern.format(gid=group_id) if pattern else ""

View File

@ -1,89 +1,89 @@
import os import os
import aiofiles # type: ignore import aiofiles # type: ignore
import nonebot import nonebot
from nonebot import require from nonebot import require
# require("nonebot_plugin_htmlrender") # require("nonebot_plugin_htmlrender")
from nonebot_plugin_htmlrender import ( # type: ignore from nonebot_plugin_htmlrender import ( # type: ignore
template_to_html, template_to_html,
template_to_pic, template_to_pic,
md_to_pic md_to_pic
) # type: ignore ) # type: ignore
async def template2html(
        template: str,
        templates: dict,
) -> str:
    """Render a template file to an HTML string.

    Args:
        template: path to the template file
        templates: keyword arguments forwarded to the template engine

    Returns:
        The rendered HTML document.
    """
    directory, filename = os.path.split(template)
    return await template_to_html(directory, filename, **templates)
async def template2image(
        template: str,
        templates: dict,
        pages=None,
        wait: int = 0,
        scale_factor: float = 1,
        debug: bool = False,
) -> bytes:
    """Render a template to an image (template -> html -> picture).

    Args:
        template: path to the template file
        templates: keyword arguments for the template engine
        pages: page/viewport options passed to the renderer
        wait: seconds to wait before the screenshot is taken
        scale_factor: device scale factor; higher means sharper output
        debug: when True, also dump the rendered HTML next to the template

    Returns:
        The rendered image as raw bytes.
    """
    if pages is None:
        # Default viewport: fixed width, minimal auto-growing height.
        pages = {
            "viewport": {
                "width" : 1080,
                "height": 10
            },
        }
    directory = os.path.dirname(template)
    filename = os.path.basename(template)
    if debug:
        # Re-render the raw HTML and write it beside the template for inspection.
        html_source = await template_to_html(
            template_name=filename,
            template_path=directory,
            **templates,
        )
        debug_file = "debug.html"
        async with aiofiles.open(
                os.path.join(directory, debug_file), "w", encoding="utf-8"
        ) as fp:
            await fp.write(html_source)
        nonebot.logger.info("Debug HTML: %s" % debug_file)
    return await template_to_pic(
        template_name=filename,
        template_path=directory,
        templates=templates,
        wait=wait,
        pages=pages,
        device_scale_factor=scale_factor
    )

View File

@ -1,209 +1,209 @@
import base64 import base64
from io import BytesIO from io import BytesIO
from urllib.parse import quote from urllib.parse import quote
import aiohttp import aiohttp
from PIL import Image from PIL import Image
from ..base.config import get_config from ..base.config import get_config
from ..base.data import LiteModel from ..base.data import LiteModel
from ..base.ly_typing import T_Bot from ..base.ly_typing import T_Bot
def escape_md(text: str) -> str:
    """Escape Markdown special characters (QQ markdown payload flavour).

    Each special character is prefixed with a double backslash; the
    backslash itself is escaped first so later escapes are not doubled.
    Newlines become a literal ``\\n`` and double quotes are escaped for
    embedding in JSON payloads.

    Args:
        text: raw text

    Returns:
        str: escaped text
    """
    for special in "\\`*_{}[]()#+-.!":
        text = text.replace(special, "\\\\" + special)
    text = text.replace("\n", r"\n")
    return text.replace('"', r'\\\"')
def escape_decorator(func):
    """Decorator: run ``escape_md`` on the text argument before calling *func*."""
    def _wrapped(text: str):
        return func(escape_md(text))
    return _wrapped
def compile_md(comps: list[str]) -> str:
    """Assemble Markdown components into a single document.

    Args:
        comps: list of markdown fragments, already in display order

    Returns:
        str: the fragments concatenated without separators
    """
    return "".join(comps)
class MarkdownComponent: class MarkdownComponent:
@staticmethod @staticmethod
def heading(text: str, level: int = 1) -> str: def heading(text: str, level: int = 1) -> str:
"""标题""" """标题"""
assert 1 <= level <= 6, "标题级别应在 1-6 之间" assert 1 <= level <= 6, "标题级别应在 1-6 之间"
return f"{'#' * level} {text}\n" return f"{'#' * level} {text}\n"
@staticmethod @staticmethod
def bold(text: str) -> str: def bold(text: str) -> str:
"""粗体""" """粗体"""
return f"**{text}**" return f"**{text}**"
@staticmethod @staticmethod
def italic(text: str) -> str: def italic(text: str) -> str:
"""斜体""" """斜体"""
return f"*{text}*" return f"*{text}*"
@staticmethod @staticmethod
def strike(text: str) -> str: def strike(text: str) -> str:
"""删除线""" """删除线"""
return f"~~{text}~~" return f"~~{text}~~"
@staticmethod @staticmethod
def code(text: str) -> str: def code(text: str) -> str:
"""行内代码""" """行内代码"""
return f"`{text}`" return f"`{text}`"
@staticmethod @staticmethod
def code_block(text: str, language: str = "") -> str: def code_block(text: str, language: str = "") -> str:
"""代码块""" """代码块"""
return f"```{language}\n{text}\n```\n" return f"```{language}\n{text}\n```\n"
@staticmethod @staticmethod
def quote(text: str) -> str: def quote(text: str) -> str:
"""引用""" """引用"""
return f"> {text}\n\n" return f"> {text}\n\n"
@staticmethod @staticmethod
def link(text: str, url: str, symbol: bool = True) -> str: def link(text: str, url: str, symbol: bool = True) -> str:
""" """
链接 链接
Args: Args:
text: 链接文本 text: 链接文本
url: 链接地址 url: 链接地址
symbol: 是否显示链接图标, mqqapi请使用False symbol: 是否显示链接图标, mqqapi请使用False
""" """
return f"[{'🔗' if symbol else ''}{text}]({url})" return f"[{'🔗' if symbol else ''}{text}]({url})"
@staticmethod @staticmethod
def image(url: str, *, size: tuple[int, int]) -> str: def image(url: str, *, size: tuple[int, int]) -> str:
""" """
图片本地图片不建议直接使用 图片本地图片不建议直接使用
Args: Args:
url: 图片链接 url: 图片链接
size: 图片大小 size: 图片大小
Returns: Returns:
markdown格式的图片 markdown格式的图片
""" """
return f"![image #{size[0]}px #{size[1]}px]({url})" return f"![image #{size[0]}px #{size[1]}px]({url})"
@staticmethod @staticmethod
async def auto_image(image: str | bytes, bot: T_Bot) -> str: async def auto_image(image: str | bytes, bot: T_Bot) -> str:
""" """
自动获取图片大小 自动获取图片大小
Args: Args:
image: 本地图片路径 | 图片url http/file | 图片bytes image: 本地图片路径 | 图片url http/file | 图片bytes
bot: bot对象用于上传图片到图床 bot: bot对象用于上传图片到图床
Returns: Returns:
markdown格式的图片 markdown格式的图片
""" """
if isinstance(image, bytes): if isinstance(image, bytes):
# 传入为二进制图片 # 传入为二进制图片
image_obj = Image.open(BytesIO(image)) image_obj = Image.open(BytesIO(image))
base64_string = base64.b64encode(image_obj.tobytes()).decode("utf-8") base64_string = base64.b64encode(image_obj.tobytes()).decode("utf-8")
url = await bot.call_api("upload_image", file=f"base64://{base64_string}") url = await bot.call_api("upload_image", file=f"base64://{base64_string}")
size = image_obj.size size = image_obj.size
elif isinstance(image, str): elif isinstance(image, str):
# 传入链接或本地路径 # 传入链接或本地路径
if image.startswith("http"): if image.startswith("http"):
# 网络请求 # 网络请求
async with aiohttp.ClientSession() as session: async with aiohttp.ClientSession() as session:
async with session.get(image) as resp: async with session.get(image) as resp:
image_data = await resp.read() image_data = await resp.read()
url = image url = image
size = Image.open(BytesIO(image_data)).size size = Image.open(BytesIO(image_data)).size
else: else:
# 本地路径/file:// # 本地路径/file://
image_obj = Image.open(image.replace("file://", "")) image_obj = Image.open(image.replace("file://", ""))
base64_string = base64.b64encode(image_obj.tobytes()).decode("utf-8") base64_string = base64.b64encode(image_obj.tobytes()).decode("utf-8")
url = await bot.call_api("upload_image", file=f"base64://{base64_string}") url = await bot.call_api("upload_image", file=f"base64://{base64_string}")
size = image_obj.size size = image_obj.size
else: else:
raise ValueError("图片类型错误") raise ValueError("图片类型错误")
return MarkdownComponent.image(url, size=size) return MarkdownComponent.image(url, size=size)
@staticmethod @staticmethod
def table(data: list[list[any]]) -> str: def table(data: list[list[any]]) -> str:
""" """
表格 表格
Args: Args:
data: 表格数据二维列表 data: 表格数据二维列表
Returns: Returns:
markdown格式的表格 markdown格式的表格
""" """
# 表头 # 表头
table = "|".join(map(str, data[0])) + "\n" table = "|".join(map(str, data[0])) + "\n"
table += "|".join([":-:" for _ in range(len(data[0]))]) + "\n" table += "|".join([":-:" for _ in range(len(data[0]))]) + "\n"
# 表内容 # 表内容
for row in data[1:]: for row in data[1:]:
table += "|".join(map(str, row)) + "\n" table += "|".join(map(str, row)) + "\n"
return table return table
@staticmethod @staticmethod
def paragraph(text: str) -> str: def paragraph(text: str) -> str:
""" """
段落 段落
Args: Args:
text: 段落内容 text: 段落内容
Returns: Returns:
markdown格式的段落 markdown格式的段落
""" """
return f"{text}\n" return f"{text}\n"
class Mqqapi:
    """Builders for mqqapi:// inline-command links used by the QQ markdown API."""

    @staticmethod
    @escape_decorator
    def cmd(text: str, cmd: str, enter: bool = True, reply: bool = False, use_cmd_start: bool = True) -> str:
        """
        Build a clickable inline-command link.

        Args:
            text: visible label
            cmd: command sent when the link is clicked
            enter: send the command automatically when True
            reply: send the command as a reply when True
            use_cmd_start: prefix the configured command start string

        Returns:
            [text](mqqapi://...) — a clickable command link, similar to a URL

        NOTE(review): ``escape_decorator``'s wrapper accepts a single ``text``
        argument, so calling this method with its declared multi-argument
        signature raises TypeError — confirm the intended usage.
        """
        if use_cmd_start:
            command_start = get_config("command_start", [])
            if command_start:
                # When command prefixes are configured, use the first one.
                cmd = f"{command_start[0]}{cmd}"
        return f"[{text}](mqqapi://aio/inlinecmd?command={quote(cmd)}&reply={str(reply).lower()}&enter={str(enter).lower()})"
class RenderData(LiteModel):
    """Render/style data of a markdown keyboard button."""
    # Field semantics presumed from the QQ button API — TODO confirm:
    # label: text shown on the button; visited_label: text after it was
    # pressed; style: numeric style id defined by the platform.
    label: str
    visited_label: str
    style: int
class Button(LiteModel):
    """A markdown keyboard button: numeric id plus its render data."""
    id: int
    render_data: RenderData

View File

@ -1,202 +1,202 @@
import base64 import base64
import io import io
from typing import Any from typing import Any
from urllib.parse import quote from urllib.parse import quote
import aiofiles import aiofiles
import aiohttp import aiohttp
import nonebot import nonebot
from PIL import Image from PIL import Image
from nonebot.adapters.onebot import v11 from nonebot.adapters.onebot import v11
from .html_tool import md_to_pic from .html_tool import md_to_pic
from .. import load_from_yaml from .. import load_from_yaml
from ..base.ly_typing import T_Bot, T_Message, T_MessageEvent from ..base.ly_typing import T_Bot, T_Message, T_MessageEvent
config = load_from_yaml("config.yml") config = load_from_yaml("config.yml")
async def broadcast_to_superusers(message: str | T_Message, markdown: bool = False):
    """Send *message* to every configured superuser on every connected bot.

    Args:
        message: plain message, or markdown text when *markdown* is True
        markdown: send via the markdown-to-image pipeline instead of plain text
    """
    superusers = config.get("superusers", [])
    for bot in nonebot.get_bots().values():
        for user_id in superusers:
            if markdown:
                await MarkdownMessage.send_md(message, bot, message_type="private", session_id=user_id)
            else:
                await bot.send_private_msg(user_id=user_id, message=message)
class MarkdownMessage:
    """Helpers for sending markdown content (rendered to an image) via OneBot v11."""

    @staticmethod
    async def send_md(
            markdown: str,
            bot: T_Bot, *,
            message_type: str = None,
            session_id: str | int = None
    ) -> dict[str, Any] | None:
        """
        Send a Markdown message; the markdown is rendered to an image first.

        Args:
            markdown: markdown source text
            bot: bot instance used for sending
            message_type: "private" or "group"
            session_id: target user id or group id

        Returns:
            The send-API response dict, or None.
        """
        # Strip the link icon so it does not show up in the rendered image.
        plain_markdown = markdown.replace("[🔗", "[")
        md_image_bytes = await md_to_pic(
            md=plain_markdown,
            width=540,
            device_scale_factor=4
        )
        # NOTE(review): debug leftover — dumps raw image bytes to stdout on every send.
        print(md_image_bytes)
        data = await bot.send_msg(
            message_type=message_type,
            group_id=session_id,
            user_id=session_id,
            message=v11.MessageSegment.image(md_image_bytes),
        )
        return data

    @staticmethod
    async def send_image(
            image: bytes | str,
            bot: T_Bot, *,
            message_type: str = None,
            session_id: str | int = None,
            event: T_MessageEvent = None,
            **kwargs
    ) -> dict:
        """
        Send a single large image.

        Args:
            image: image bytes or a local file path; for URLs obtain markdown
                via Markdown.image_async and send it with send_md
            bot: bot instance
            message_type: message type
            session_id: session id
            event: event (currently unused here)
            kwargs: extra arguments forwarded to send_msg

        Returns:
            dict: response data
        """
        if isinstance(image, str):
            # A string is treated as a local path; load the bytes first.
            async with aiofiles.open(image, "rb") as f:
                image = await f.read()
        # Upload strategy selector; currently hard-wired to 2 (image-host upload).
        method = 2
        if method == 2:
            base64_string = base64.b64encode(image).decode("utf-8")
            data = await bot.call_api("upload_image", file=f"base64://{base64_string}")
            await MarkdownMessage.send_md(MarkdownMessage.image(data, Image.open(io.BytesIO(image)).size), bot,
                                          message_type=message_type,
                                          session_id=session_id)
        # Alternative scheme for other OneBot implementations
        else:
            image_message_id = (await bot.send_private_msg(
                user_id=bot.self_id,
                message=[
                    v11.MessageSegment.image(file=image)
                ]
            ))["message_id"]
            image_url = (await bot.get_msg(message_id=image_message_id))["message"][0]["data"]["url"]
            image_size = Image.open(io.BytesIO(image)).size
            image_md = MarkdownMessage.image(image_url, image_size)
            return await MarkdownMessage.send_md(image_md, bot, message_type=message_type, session_id=session_id)
        # NOTE(review): on the method == 2 path `data` holds the upload_image
        # response, so this fallback only fires when that call returned None.
        if data is None:
            data = await bot.send_msg(
                message_type=message_type,
                group_id=session_id,
                user_id=session_id,
                message=v11.MessageSegment.image(image),
                **kwargs
            )
        return data

    @staticmethod
    async def get_image_url(image: bytes | str, bot: T_Bot) -> str:
        """Upload an image to the image host and return its URL.

        Args:
            bot: bot used for the upload
            image: image bytes or a local file path

        Returns:
            NOTE(review): not implemented yet — implicitly returns None
            despite the ``-> str`` annotation.
        """
        # Pending an upstream fix in Lagrange.OneBot

    @staticmethod
    def btn_cmd(name: str, cmd: str, reply: bool = False, enter: bool = True) -> str:
        """Build a clickable command button.

        Args:
            name: visible button label
            cmd: command to send; it is URL-encoded here, do not pre-encode it
            reply: send the command as a reply when True
            enter: send automatically when True, otherwise fill the input box

        Returns:
            markdown for a clickable command button
        """
        if "" not in config.get("command_start", ["/"]) and config.get("alconna_use_command_start", False):
            cmd = f"{config['command_start'][0]}{cmd}"
        return f"[{name}](mqqapi://aio/inlinecmd?command={quote(cmd)}&reply={str(reply).lower()}&enter={str(enter).lower()})"

    @staticmethod
    def btn_link(name: str, url: str) -> str:
        """Build a clickable hyperlink button.

        Args:
            name: visible link text
            url: link target

        Returns:
            markdown link
        """
        return f"[🔗{name}]({url})"

    @staticmethod
    def image(url: str, size: tuple[int, int]) -> str:
        """Build an image fragment with explicit dimensions.

        Args:
            size: (width, height) in pixels
            url: image URL

        Returns:
            markdown image fragment
        """
        return f"![image #{size[0]}px #{size[1]}px]({url})"

    @staticmethod
    async def image_async(url: str) -> str:
        """Build an image fragment, fetching the URL once to measure its size.

        Args:
            url: image URL

        Returns:
            Markdown image syntax: ![image #{width}px #{height}px](link)
        """
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as resp:
                    image = Image.open(io.BytesIO(await resp.read()))
                    return MarkdownMessage.image(url, image.size)
        except Exception as e:
            nonebot.logger.error(f"get image error: {e}")
            return "[Image Error]"

    @staticmethod
    def escape(text: str) -> str:
        """Escape markdown special characters.

        Args:
            text: text to escape; do not pass a whole markdown document,
                or every special character in it will be escaped

        Returns:
            the escaped text
        """
        chars = "*[]()~_`>#+=|{}.!"
        for char in chars:
            text = text.replace(char, f"\\\\{char}")
        return text

View File

@ -1,101 +1,101 @@
import nonebot import nonebot
def convert_duration(text: str, default) -> float:
    """Parse a natural-language duration such as "1d2h3m" into seconds.

    Supported units: d (days), h (hours), m (minutes), s (seconds),
    ms (milliseconds).  Unknown characters are ignored; a trailing number
    without a unit is discarded.

    Bug fix: the previous implementation applied each number to the
    *preceding* unit, so the documented "1d2h3m" format raised KeyError
    on the first unit and always returned *default*.

    Args:
        text: duration string, e.g. "1d2h3m"
        default: value returned when parsing fails

    Returns:
        float: total number of seconds, or *default* on error
    """
    units = {
        "d": 86400,
        "h": 3600,
        "m": 60,
        "s": 1,
        "ms": 0.001,
    }
    try:
        duration = 0.0
        number = ""
        i = 0
        while i < len(text):
            char = text[i]
            if char.isdigit():
                number += char
                i += 1
                continue
            if char in units:
                # "m" directly followed by "s" means milliseconds.
                if char == "m" and i + 1 < len(text) and text[i + 1] == "s":
                    unit, i = "ms", i + 2
                else:
                    unit, i = char, i + 1
                if number:
                    duration += int(number) * units[unit]
                    number = ""
            else:
                # Ignore separators and unknown characters.
                i += 1
        return duration
    except Exception as e:
        nonebot.logger.info(f"convert_duration error: {e}")
        return default
def convert_time_to_seconds(time_str):
    """Convert a duration string like "1d2m3s" to whole seconds.

    Args:
        time_str: duration text; numbers may contain a decimal point

    Returns:
        int: total seconds (fractions truncated)
    """
    factors = {'d': 24 * 60 * 60, 'h': 60 * 60, 'm': 60, 's': 1}
    total = 0.0
    digits = ''
    for ch in time_str:
        if ch.isdigit() or ch == '.':
            digits += ch
        elif ch in factors:
            total += float(digits) * factors[ch]
            digits = ''
    return int(total)
def convert_seconds_to_time(seconds):
    """Format a number of seconds as a compact duration string.

    Zero-valued day/hour/minute parts are omitted; seconds are shown
    only when all larger parts are zero (e.g. 90 -> "1m", 30 -> "30s").

    Args:
        seconds: duration in seconds

    Returns:
        str: formatted duration such as "1d2h3m" or "30s"
    """
    days, rem = divmod(seconds, 24 * 60 * 60)
    hours, rem = divmod(rem, 60 * 60)
    minutes, secs = divmod(rem, 60)
    parts = [(days, "d"), (hours, "h"), (minutes, "m")]
    result = "".join(f"{value}{suffix}" for value, suffix in parts if value)
    return result if result else f"{secs}s"

View File

@ -1,99 +1,99 @@
import random import random
from importlib.metadata import PackageNotFoundError, version from importlib.metadata import PackageNotFoundError, version
def clamp(value: float, min_value: float, max_value: float) -> float | int: def clamp(value: float, min_value: float, max_value: float) -> float | int:
"""将值限制在最小值和最大值之间 """将值限制在最小值和最大值之间
Args: Args:
value (float): 要限制的值 value (float): 要限制的值
min_value (float): 最小值 min_value (float): 最小值
max_value (float): 最大值 max_value (float): 最大值
Returns: Returns:
float: 限制后的值 float: 限制后的值
""" """
return max(min(value, max_value), min_value) return max(min(value, max_value), min_value)
def convert_size(size: int, precision: int = 2, add_unit: bool = True, suffix: str = " XiB") -> str | float: def convert_size(size: int, precision: int = 2, add_unit: bool = True, suffix: str = " XiB") -> str | float:
"""把字节数转换为人类可读的字符串,计算正负 """把字节数转换为人类可读的字符串,计算正负
Args: Args:
add_unit: 是否添加单位False后则suffix无效 add_unit: 是否添加单位False后则suffix无效
suffix: XiB或XB suffix: XiB或XB
precision: 浮点数的小数点位数 precision: 浮点数的小数点位数
size (int): 字节数 size (int): 字节数
Returns: Returns:
str: The human-readable string, e.g. "1.23 GB". str: The human-readable string, e.g. "1.23 GB".
""" """
is_negative = size < 0 is_negative = size < 0
size = abs(size) size = abs(size)
for unit in ("", "K", "M", "G", "T", "P", "E", "Z"): for unit in ("", "K", "M", "G", "T", "P", "E", "Z"):
if size < 1024: if size < 1024:
break break
size /= 1024 size /= 1024
if is_negative: if is_negative:
size = -size size = -size
if add_unit: if add_unit:
return f"{size:.{precision}f}{suffix.replace('X', unit)}" return f"{size:.{precision}f}{suffix.replace('X', unit)}"
else: else:
return size return size
def keywords_in_text(keywords: list[str], text: str, all_matched: bool) -> bool:
    """Check whether keywords occur in *text*.

    Args:
        keywords: keywords to look for
        text: text to search
        all_matched: require every keyword (True) or any keyword (False)

    Returns:
        bool: match result; an empty keyword list yields True for the
        all-matched mode and False otherwise (matching the built-ins).
    """
    if all_matched:
        return all(kw in text for kw in keywords)
    return any(kw in text for kw in keywords)
def check_for_package(package_name: str) -> bool:
    """Return True if *package_name* is an installed distribution."""
    try:
        version(package_name)
    except PackageNotFoundError:
        return False
    return True
def random_ascii_string(length: int) -> str:
    """Generate a random string of printable ASCII characters (0x21-0x7E).

    Args:
        length: number of characters to generate

    Returns:
        str: the random string
    """
    return "".join(chr(random.randint(33, 126)) for _ in range(length))
def random_hex_string(length: int) -> str:
    """Generate a random lowercase hexadecimal string.

    Args:
        length: number of hex digits to generate

    Returns:
        str: the random string
    """
    return "".join(random.choices("0123456789abcdef", k=length))

View File

@ -1,3 +1,3 @@
from .user_info import user_infos from .user_info import user_infos
from .count_friends import count_friends from .count_friends import count_friends
from .count_groups import count_groups from .count_groups import count_groups

View File

@ -1,13 +1,13 @@
from nonebot.adapters import satori from nonebot.adapters import satori
async def count_friends(bot: satori.Bot) -> int:
    """Count the bot's friends by paging through the friend list.

    The result subtracts 1 — presumably to exclude the bot's own entry
    from the list (TODO confirm against the satori implementation).
    """
    total = 0
    page = await bot.friend_list()
    while page.next is not None:
        total += len(page.data)
        page = await bot.friend_list(next_token=page.next)
    # Final page (next is None) still carries data.
    total += len(page.data)
    return total - 1

View File

@ -1,13 +1,13 @@
from nonebot.adapters import satori from nonebot.adapters import satori
async def count_groups(bot: "satori.Bot") -> int:
    """Count the guilds (groups) the bot has joined, paging through the list.

    Bug fix: follow-up pages were fetched with ``bot.friend_list`` instead
    of ``bot.guild_list``, so multi-page results mixed friend data into the
    group count.

    Args:
        bot: satori bot instance

    Returns:
        int: number of guilds minus 1 (mirrors count_friends; presumably
        excludes a self entry — TODO confirm).
    """
    total = 0
    page = await bot.guild_list()
    while page.next is not None:
        total += len(page.data)
        page = await bot.guild_list(next_token=page.next)
    # Final page (next is None) still carries data.
    total += len(page.data)
    return total - 1

View File

@ -1,64 +1,64 @@
import nonebot import nonebot
from nonebot.adapters import satori from nonebot.adapters import satori
from nonebot.adapters.satori.models import User from nonebot.adapters.satori.models import User
class UserInfo:
    """In-memory cache of satori User objects, keyed by stringified user id."""

    # NOTE(review): class-level mutable attribute — shared by every instance.
    user_infos: dict = {}

    async def load_friends(self, bot: satori.Bot):
        """Populate the cache from the bot's (paged) friend list."""
        nonebot.logger.info("Update user info from friends")
        friend_response = await bot.friend_list()
        while friend_response.next is not None:
            for i in friend_response.data:
                i: User = i
                self.user_infos[str(i.id)] = i
            friend_response = await bot.friend_list(next_token=friend_response.next)
        # Final page (next is None) still carries data.
        for i in friend_response.data:
            i: User = i
            self.user_infos[str(i.id)] = i
        nonebot.logger.info("Finish update user info")

    async def get(self, uid: int | str) -> User | None:
        """Return the cached User for *uid*, or None when unknown."""
        try:
            return self.user_infos[str(uid)]
        except KeyError:
            return None

    async def put(self, user: User) -> bool:
        """
        Insert or update a user record.  The return value only reports
        whether the stored data changed, not whether the operation succeeded.

        Args:
            user: user record to merge into the cache

        Returns: True when the cached data changed, False otherwise.
        """
        try:
            old_user: User = self.user_infos[str(user.id)]
            attr_edited = False
            # Merge field by field; None means "no information", not "clear".
            if user.name is not None:
                if old_user.name != user.name:
                    attr_edited = True
                self.user_infos[str(user.id)].name = user.name
            if user.nick is not None:
                if old_user.nick != user.nick:
                    attr_edited = True
                self.user_infos[str(user.id)].nick = user.nick
            if user.avatar is not None:
                if old_user.avatar != user.avatar:
                    attr_edited = True
                self.user_infos[str(user.id)].avatar = user.avatar
            return attr_edited
        except KeyError:
            # First time we see this user: store the record wholesale.
            self.user_infos[str(user.id)] = user
            return True

    def __init__(self):
        pass


# Module-level singleton used by importers of this module.
user_infos = UserInfo()

22
tests/test_ipc.py Normal file
View File

@ -0,0 +1,22 @@
from liteyuki.comm import Channel as Chan
from multiprocessing import Process
def p1(chan: Chan):
    """Producer: push the integers 0..9 into the channel."""
    for value in range(10):
        chan.send(value)
def p2(chan: Chan):
    """Consumer: print every item received from the channel, forever."""
    while True:
        item = chan.recv()
        print(item)
def test_ipc():
chan = Chan("Name")
p1_proc = Process(target=p1, args=(chan,))
p2_proc = Process(target=p2, args=(chan,))
p1_proc.start()
p2_proc.start()