From 98a9d6413a629aeee2673628c5092fad88d5adb3 Mon Sep 17 00:00:00 2001 From: snowykami Date: Sun, 13 Oct 2024 02:51:33 +0800 Subject: [PATCH] :sparkles: Separate `magicoca` and `croterline` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/pypi-publish.yml | 5 +- liteyuki/bot/__init__.py | 42 +- liteyuki/comm/channel.py | 125 ++- liteyuki/comm/rpc.py | 26 - liteyuki/core/manager.py | 107 ++- liteyuki/plugin/load.py | 1 - pdm.lock | 510 ++++++++++ pyproject.toml | 26 +- requirements.txt | 8 +- .../liteyukibot_plugin_nonebot/__init__.py | 53 -- .../dev_reloader.py | 24 - .../nb_utils/adapter_manager/__init__.py | 14 - .../nb_utils/adapter_manager/onebot.py | 12 - .../nb_utils/adapter_manager/satori.py | 26 - .../nb_utils/driver_manager/__init__.py | 6 - .../nb_utils/driver_manager/auto_set_env.py | 20 - .../nb_utils/driver_manager/defines.py | 17 - .../liteyukibot_plugin_nonebot/py.typed | 0 src/liteyuki_plugins/nonebot/__init__.py | 27 + .../liteyuki_mctools/__init__.py | 0 .../liteyuki_pacman/__init__.py | 0 .../nonebot_plugins/liteyuki_pacman/common.py | 0 .../nonebot_plugins/liteyuki_pacman/npm.py | 0 .../nonebot_plugins/liteyuki_pacman/rpm.py | 0 .../liteyuki_smart_reply/__init__.py | 0 .../liteyuki_smart_reply/matchers.py | 0 .../liteyuki_smart_reply/monitors.py | 0 .../liteyuki_smart_reply/utils.py | 0 .../liteyuki_statistics/__init__.py | 0 .../liteyuki_statistics/common.py | 0 .../liteyuki_statistics/data_source.py | 2 +- .../liteyuki_statistics/stat_matchers.py | 0 .../liteyuki_statistics/stat_monitors.py | 0 .../liteyuki_statistics/stat_restful_api.py | 0 .../liteyuki_statistics/word_cloud/LICENSE | 0 .../word_cloud/data_source.py | 0 .../liteyuki_status/__init__.py | 0 .../nonebot_plugins/liteyuki_status/api.py | 0 .../liteyuki_status/counter_for_satori.py | 0 .../nonebot_plugins/liteyuki_status/status.py | 0 .../liteyuki_uniblacklist/__init__.py | 0 .../liteyuki_uniblacklist/api.py | 0 .../nonebot_plugins/liteyuki_user/__init__.py | 0 .../nonebot_plugins/liteyuki_user/const.py | 0 .../liteyuki_user/input_handle.py | 0 .../liteyuki_user/profile_manager.py | 0 .../liteyuki_weather/__init__.py | 2 +- .../liteyuki_weather/qw_api.py | 4 +- .../liteyuki_weather/qw_models.py | 0 .../liteyuki_weather/qweather.py | 0 .../nonebot_plugins/packmanv2/__init__.py | 0 .../nonebot_plugins/packmanv2/handle.py | 0 .../nonebot_plugins/packmanv2/npm/__init__.py | 0 .../packmanv2/npm/data_source.py | 0 .../nonebot_plugins/packmanv2/rpm/__init__.py | 0 .../nonebot}/nonebot_plugins/to_liteyuki.py | 0 .../nonebot_plugins/webdash/__init__.py | 0 .../nonebot_plugins/webdash/common.py | 0 .../nonebot}/nonebot_plugins/webdash/main.py | 0 .../nonebot_plugins/webdash/restful_api.py | 0 .../nonebot/np_main}/__init__.py | 38 +- .../nonebot/np_main}/api.py | 94 +- .../nonebot/np_main}/core.py | 602 ++++++------ .../nonebot/np_main}/loader.py | 72 +- .../nonebot/np_main}/uitls.py | 0 .../liteyuki_satori_user_info/__init__.py | 16 - .../liteyuki_satori_user_info/auto_update.py | 20 - src/utils/__init__.py | 84 +- src/utils/base/config.py | 218 ++--- src/utils/base/data.py | 870 +++++++++--------- src/utils/base/data_manager.py | 198 ++-- src/utils/base/language.py | 474 +++++----- src/utils/base/log.py | 158 ++-- src/utils/base/ly_function.py | 394 ++++---- src/utils/base/ly_typing.py | 16 +- src/utils/base/permission.py | 10 +- src/utils/base/resource.py | 710 +++++++------- src/utils/base/word_bank.py | 114 +--
src/utils/event/__init__.py | 2 +- src/utils/event/get_info.py | 52 +- src/utils/external/logo.py | 80 +- src/utils/message/html_tool.py | 178 ++-- src/utils/message/markdown.py | 418 ++++----- src/utils/message/message.py | 404 ++++---- src/utils/message/string_tool.py | 202 ++-- src/utils/message/tools.py | 198 ++-- src/utils/satori_utils/__init__.py | 6 +- src/utils/satori_utils/count_friends.py | 26 +- src/utils/satori_utils/count_groups.py | 26 +- src/utils/satori_utils/user_info.py | 128 +-- tests/test_ipc.py | 22 + 91 files changed, 3675 insertions(+), 3212 deletions(-) delete mode 100644 liteyuki/comm/rpc.py create mode 100644 pdm.lock delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/__init__.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/dev_reloader.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/__init__.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/onebot.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/satori.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/__init__.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/auto_set_env.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/defines.py delete mode 100644 src/liteyuki_plugins/liteyukibot_plugin_nonebot/py.typed create mode 100644 src/liteyuki_plugins/nonebot/__init__.py rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_mctools/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_pacman/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_pacman/common.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_pacman/npm.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_pacman/rpm.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_smart_reply/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_smart_reply/matchers.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_smart_reply/monitors.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_smart_reply/utils.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/common.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/data_source.py (98%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/stat_matchers.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/stat_monitors.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/stat_restful_api.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/word_cloud/LICENSE (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_statistics/word_cloud/data_source.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_status/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_status/api.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_status/counter_for_satori.py (100%) rename src/{ => 
liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_status/status.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_uniblacklist/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_uniblacklist/api.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_user/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_user/const.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_user/input_handle.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_user/profile_manager.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_weather/__init__.py (91%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_weather/qw_api.py (98%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_weather/qw_models.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/liteyuki_weather/qweather.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/packmanv2/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/packmanv2/handle.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/packmanv2/npm/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/packmanv2/npm/data_source.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/packmanv2/rpm/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/to_liteyuki.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/webdash/__init__.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/webdash/common.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/webdash/main.py (100%) rename src/{ => liteyuki_plugins/nonebot}/nonebot_plugins/webdash/restful_api.py (100%) rename src/{liteyuki_main => liteyuki_plugins/nonebot/np_main}/__init__.py (86%) rename src/{liteyuki_main => liteyuki_plugins/nonebot/np_main}/api.py (96%) rename src/{liteyuki_main => liteyuki_plugins/nonebot/np_main}/core.py (95%) rename src/{liteyuki_main => liteyuki_plugins/nonebot/np_main}/loader.py (75%) rename src/{liteyuki_main => liteyuki_plugins/nonebot/np_main}/uitls.py (100%) delete mode 100644 src/nonebot_plugins/liteyuki_satori_user_info/__init__.py delete mode 100644 src/nonebot_plugins/liteyuki_satori_user_info/auto_update.py create mode 100644 tests/test_ipc.py diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml index 4ede3b5e..402eae8a 100644 --- a/.github/workflows/pypi-publish.yml +++ b/.github/workflows/pypi-publish.yml @@ -1,8 +1,9 @@ name: Publish on: - release: - types: [published] + push: + tags: + - 'v*' jobs: pypi-publish: diff --git a/liteyuki/bot/__init__.py b/liteyuki/bot/__init__.py index de6fba42..67abe2ec 100644 --- a/liteyuki/bot/__init__.py +++ b/liteyuki/bot/__init__.py @@ -8,18 +8,21 @@ import threading import time from typing import Any, Optional -from liteyuki.bot.lifespan import (LIFESPAN_FUNC, Lifespan, PROCESS_LIFESPAN_FUNC) +from liteyuki.bot.lifespan import LIFESPAN_FUNC, Lifespan, PROCESS_LIFESPAN_FUNC from liteyuki.comm.channel import get_channel from liteyuki.core.manager import ProcessManager from liteyuki.log import init_log, logger from liteyuki.plugin import load_plugin from liteyuki.utils import IS_MAIN_PROCESS +# new version +from liteyuki.core.manager import sub_process_manager + __all__ = [ - "LiteyukiBot", - "get_bot", - "get_config", - 
"get_config_with_compat", + "LiteyukiBot", + "get_bot", + "get_config", + "get_config_with_compat", ] @@ -60,6 +63,7 @@ class LiteyukiBot: 启动逻辑 """ await self.lifespan.before_start() # 启动前钩子 + sub_process_manager.start_all() await self.lifespan.after_start() # 启动后钩子 await self.keep_alive() @@ -108,7 +112,11 @@ class LiteyukiBot: cmd = "nohup" self.process_manager.terminate_all() # 进程退出后重启 - threading.Thread(target=os.system, args=(f"{cmd} {executable} {' '.join(args)}",), daemon=True).start() + threading.Thread( + target=os.system, + args=(f"{cmd} {executable} {' '.join(args)}",), + daemon=True, + ).start() sys.exit(0) self.call_restart_count += 1 @@ -189,7 +197,9 @@ class LiteyukiBot: """ return self.lifespan.on_before_process_shutdown(func) - def on_before_process_restart(self, func: PROCESS_LIFESPAN_FUNC) -> PROCESS_LIFESPAN_FUNC: + def on_before_process_restart( + self, func: PROCESS_LIFESPAN_FUNC + ) -> PROCESS_LIFESPAN_FUNC: """ 注册进程重启前的函数,为子进程重启时调用 Args: @@ -211,7 +221,7 @@ class LiteyukiBot: return self.lifespan.on_after_restart(func) -_BOT_INSTANCE: LiteyukiBot +_BOT_INSTANCE: LiteyukiBot | None = None def get_bot() -> LiteyukiBot: @@ -241,7 +251,9 @@ def get_config(key: str, default: Any = None) -> Any: return get_bot().config.get(key, default) -def get_config_with_compat(key: str, compat_keys: tuple[str], default: Any = None) -> Any: +def get_config_with_compat( + key: str, compat_keys: tuple[str], default: Any = None +) -> Any: """ 获取配置,兼容旧版本 Args: @@ -256,14 +268,18 @@ def get_config_with_compat(key: str, compat_keys: tuple[str], default: Any = Non return get_bot().config[key] for compat_key in compat_keys: if compat_key in get_bot().config: - logger.warning(f"Config key \"{compat_key}\" will be deprecated, use \"{key}\" instead.") + logger.warning( + f'Config key "{compat_key}" will be deprecated, use "{key}" instead.' 
+ ) return get_bot().config[compat_key] return default def print_logo(): """@litedoc-hide""" - print("\033[34m" + r""" + print( + "\033[34m" + + r""" __ ______ ________ ________ __ __ __ __ __ __ ______ / | / |/ |/ |/ \ / |/ | / |/ | / |/ | $$ | $$$$$$/ $$$$$$$$/ $$$$$$$$/ $$ \ /$$/ $$ | $$ |$$ | /$$/ $$$$$$/ @@ -273,4 +289,6 @@ def print_logo(): $$ |_____ _$$ |_ $$ | $$ |_____ $$ | $$ \__$$ |$$ |$$ \ _$$ |_ $$ |/ $$ | $$ | $$ | $$ | $$ $$/ $$ | $$ |/ $$ | $$$$$$$$/ $$$$$$/ $$/ $$$$$$$$/ $$/ $$$$$$/ $$/ $$/ $$$$$$/ - """ + "\033[0m") + """ + + "\033[0m" + ) diff --git a/liteyuki/comm/channel.py b/liteyuki/comm/channel.py index 38c2a46d..2ca9449c 100644 --- a/liteyuki/comm/channel.py +++ b/liteyuki/comm/channel.py @@ -4,20 +4,31 @@ """ import asyncio from multiprocessing import Pipe -from typing import Any, Callable, Coroutine, Generic, Optional, TypeAlias, TypeVar, get_args +from typing import ( + Any, + Callable, + Coroutine, + Generic, + Optional, + TypeAlias, + TypeVar, + get_args, +) from liteyuki.log import logger from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable T = TypeVar("T") -SYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Any] # 同步接收函数 -ASYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, Any]] # 异步接收函数 -ON_RECEIVE_FUNC: TypeAlias = SYNC_ON_RECEIVE_FUNC | ASYNC_ON_RECEIVE_FUNC # 接收函数 +SYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Any] # 同步接收函数 +ASYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[ + [T], Coroutine[Any, Any, Any] +] # 异步接收函数 +ON_RECEIVE_FUNC: TypeAlias = SYNC_ON_RECEIVE_FUNC | ASYNC_ON_RECEIVE_FUNC # 接收函数 -SYNC_FILTER_FUNC: TypeAlias = Callable[[T], bool] # 同步过滤函数 -ASYNC_FILTER_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, bool]] # 异步过滤函数 -FILTER_FUNC: TypeAlias = SYNC_FILTER_FUNC | ASYNC_FILTER_FUNC # 过滤函数 +SYNC_FILTER_FUNC: TypeAlias = Callable[[T], bool] # 同步过滤函数 +ASYNC_FILTER_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, bool]] # 异步过滤函数 +FILTER_FUNC: TypeAlias = SYNC_FILTER_FUNC | ASYNC_FILTER_FUNC # 过滤函数 _func_id: int = 0 _channel: dict[str, "Channel"] = {} @@ -39,7 +50,9 @@ class Channel(Generic[T]): """ self.conn_send, self.conn_recv = Pipe() - self._conn_send_inner, self._conn_recv_inner = Pipe() # 内部通道,用于子进程通信 + self._conn_send_inner, self._conn_recv_inner = ( + Pipe() + ) # 内部通道,用于子进程通信 self._closed = False self._on_main_receive_func_ids: list[int] = [] self._on_sub_receive_func_ids: list[int] = [] @@ -64,7 +77,9 @@ class Channel(Generic[T]): _channel[name] = self logger.debug(f"Channel {name} initialized in main process") else: - logger.debug(f"Channel {name} initialized in sub process, should manually set in main process") + logger.debug( + f"Channel {name} initialized in sub process, should manually set in main process" + ) def _get_generic_type(self) -> Optional[type]: """ @@ -72,7 +87,7 @@ class Channel(Generic[T]): Returns: Optional[type]: 泛型类型 """ - if hasattr(self, '__orig_class__'): + if hasattr(self, "__orig_class__"): return get_args(self.__orig_class__)[0] return None @@ -98,7 +113,10 @@ class Channel(Generic[T]): elif isinstance(structure, dict): if not isinstance(data, dict): return False - return all(k in data and self._validate_structure(data[k], structure[k]) for k in structure) + return all( + k in data and self._validate_structure(data[k], structure[k]) + for k in structure + ) return False def __str__(self): @@ -113,10 +131,12 @@ class Channel(Generic[T]): if self.type_check: _type = self._get_generic_type() if _type is not None and not self._validate_structure(data, _type): - raise 
TypeError(f"Data must be an instance of {_type}, {type(data)} found") + raise TypeError( + f"Data must be an instance of {_type}, {type(data)} found" + ) if self._closed: - raise RuntimeError("Cannot send to a closed channel_") + raise RuntimeError("Cannot send to a closed channel") self.conn_send.send(data) def receive(self) -> T: @@ -126,7 +146,7 @@ class Channel(Generic[T]): T: 数据 """ if self._closed: - raise RuntimeError("Cannot receive from a closed channel_") + raise RuntimeError("Cannot receive from a closed channel") while True: data = self.conn_recv.recv() @@ -142,7 +162,9 @@ class Channel(Generic[T]): data = await loop.run_in_executor(None, self.receive) return data - def on_receive(self, filter_func: Optional[FILTER_FUNC] = None) -> Callable[[Callable[[T], Any]], Callable[[T], Any]]: + def on_receive( + self, filter_func: Optional[FILTER_FUNC] = None + ) -> Callable[[Callable[[T], Any]], Callable[[T], Any]]: """ 接收数据并执行函数 Args: @@ -187,37 +209,52 @@ class Channel(Generic[T]): data: 数据 """ if IS_MAIN_PROCESS: - [asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_main_receive_func_ids] + [ + asyncio.create_task(_callback_funcs[func_id](data)) + for func_id in self._on_main_receive_func_ids + ] else: - [asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_sub_receive_func_ids] + [ + asyncio.create_task(_callback_funcs[func_id](data)) + for func_id in self._on_sub_receive_func_ids + ] """子进程可用的主动和被动通道""" -active_channel: Channel = Channel(name="active_channel") # 主动通道 +active_channel: Channel = Channel(name="active_channel") # 主动通道 passive_channel: Channel = Channel(name="passive_channel") # 被动通道 -publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel(name="publish_channel") # 发布通道 +publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel( + name="publish_channel" +) # 发布通道 """通道传递通道,主进程创建单例,子进程初始化时实例化""" -channel_deliver_active_channel: Channel[Channel[Any]] # 主动通道传递通道 -channel_deliver_passive_channel: Channel[tuple[str, dict[str, Any]]] # 被动通道传递通道 +channel_deliver_active_channel: Channel[Channel[Any]] # 主动通道传递通道 +channel_deliver_passive_channel: Channel[tuple[str, dict[str, Any]]] # 被动通道传递通道 if IS_MAIN_PROCESS: - channel_deliver_active_channel = Channel(name="channel_deliver_active_channel") # 主动通道传递通道 - channel_deliver_passive_channel = Channel(name="channel_deliver_passive_channel") # 被动通道传递通道 + channel_deliver_active_channel = Channel( + name="channel_deliver_active_channel" + ) # 主动通道传递通道 + channel_deliver_passive_channel = Channel( + name="channel_deliver_passive_channel" + ) # 被动通道传递通道 - - @channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "set_channel") + @channel_deliver_passive_channel.on_receive( + filter_func=lambda data: data[0] == "set_channel" + ) def on_set_channel(data: tuple[str, dict[str, Any]]): name, channel = data[1]["name"], data[1]["channel_"] set_channel(name, channel) - - @channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "get_channel") + @channel_deliver_passive_channel.on_receive( + filter_func=lambda data: data[0] == "get_channel" + ) def on_get_channel(data: tuple[str, dict[str, Any]]): name, recv_chan = data[1]["name"], data[1]["recv_chan"] recv_chan.send(get_channel(name)) - - @channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "get_channels") + @channel_deliver_passive_channel.on_receive( + filter_func=lambda data: data[0] == "get_channels" + ) def on_get_channels(data: tuple[str, dict[str, Any]]): 
recv_chan = data[1]["recv_chan"] recv_chan.send(get_channels()) @@ -231,7 +268,9 @@ def set_channel(name: str, channel: "Channel"): channel ([`Channel`](#class-channel-generic-t)): 通道实例 """ if not isinstance(channel, Channel): - raise TypeError(f"channel_ must be an instance of Channel, {type(channel)} found") + raise TypeError( + f"channel_ must be an instance of Channel, {type(channel)} found" + ) if IS_MAIN_PROCESS: if name in _channel: @@ -241,10 +280,11 @@ def set_channel(name: str, channel: "Channel"): # 请求主进程设置通道 channel_deliver_passive_channel.send( ( - "set_channel", { - "name" : name, - "channel_": channel, - } + "set_channel", + { + "name": name, + "channel_": channel, + }, ) ) @@ -273,13 +313,7 @@ def get_channel(name: str) -> "Channel": else: recv_chan = Channel[Channel[Any]]("recv_chan") channel_deliver_passive_channel.send( - ( - "get_channel", - { - "name" : name, - "recv_chan": recv_chan - } - ) + ("get_channel", {"name": name, "recv_chan": recv_chan}) ) return recv_chan.receive() @@ -294,12 +328,5 @@ def get_channels() -> dict[str, "Channel"]: return _channel else: recv_chan = Channel[dict[str, Channel[Any]]]("recv_chan") - channel_deliver_passive_channel.send( - ( - "get_channels", - { - "recv_chan": recv_chan - } - ) - ) + channel_deliver_passive_channel.send(("get_channels", {"recv_chan": recv_chan})) return recv_chan.receive() diff --git a/liteyuki/comm/rpc.py b/liteyuki/comm/rpc.py deleted file mode 100644 index 6b46f081..00000000 --- a/liteyuki/comm/rpc.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -本模块用于实现RPC(基于IPC)通信 -""" - -from typing import TypeAlias, Callable, Any - -from liteyuki.comm.channel import Channel - -ON_CALLING_FUNC: TypeAlias = Callable[[tuple, dict], Any] - - -class RPC: - """ - RPC类 - """ - - def __init__(self, on_calling: ON_CALLING_FUNC) -> None: - self.on_calling = on_calling - - def call(self, args: tuple, kwargs: dict) -> Any: - """ - 调用 - """ - # 获取self.calling函数名 - return self.on_calling(args, kwargs) diff --git a/liteyuki/core/manager.py b/liteyuki/core/manager.py index 3d76006c..5108d70d 100644 --- a/liteyuki/core/manager.py +++ b/liteyuki/core/manager.py @@ -14,6 +14,9 @@ import threading from multiprocessing import Process from typing import Any, Callable, TYPE_CHECKING, TypeAlias +from croterline.context import Context +from croterline.process import SubProcess, ProcessFuncType + from liteyuki.log import logger from liteyuki.utils import IS_MAIN_PROCESS @@ -26,7 +29,10 @@ from liteyuki.comm import Channel if IS_MAIN_PROCESS: from liteyuki.comm.channel import get_channel, publish_channel, get_channels from liteyuki.comm.storage import shared_memory - from liteyuki.comm.channel import channel_deliver_active_channel, channel_deliver_passive_channel + from liteyuki.comm.channel import ( + channel_deliver_active_channel, + channel_deliver_passive_channel, + ) else: from liteyuki.comm import channel from liteyuki.comm import storage @@ -34,20 +40,18 @@ else: TARGET_FUNC: TypeAlias = Callable[..., Any] TIMEOUT = 10 -__all__ = [ - "ProcessManager" -] +__all__ = ["ProcessManager", "sub_process_manager"] multiprocessing.set_start_method("spawn", force=True) class ChannelDeliver: def __init__( - self, - active: Channel[Any], - passive: Channel[Any], - channel_deliver_active: Channel[Channel[Any]], - channel_deliver_passive: Channel[tuple[str, dict]], - publish: Channel[tuple[str, Any]], + self, + active: Channel[Any], + passive: Channel[Any], + channel_deliver_active: Channel[Channel[Any]], + channel_deliver_passive: 
Channel[tuple[str, dict]], + publish: Channel[tuple[str, Any]], ): self.active = active self.passive = passive @@ -57,7 +61,9 @@ class ChannelDeliver: # 函数处理一些跨进程通道的 -def _delivery_channel_wrapper(func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyValueStore", *args, **kwargs): +def _delivery_channel_wrapper( + func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyValueStore", *args, **kwargs +): """ 子进程入口函数 处理一些操作 @@ -68,8 +74,12 @@ def _delivery_channel_wrapper(func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyVal channel.active_channel = cd.active # 子进程主动通道 channel.passive_channel = cd.passive # 子进程被动通道 - channel.channel_deliver_active_channel = cd.channel_deliver_active # 子进程通道传递主动通道 - channel.channel_deliver_passive_channel = cd.channel_deliver_passive # 子进程通道传递被动通道 + channel.channel_deliver_active_channel = ( + cd.channel_deliver_active + ) # 子进程通道传递主动通道 + channel.channel_deliver_passive_channel = ( + cd.channel_deliver_passive + ) # 子进程通道传递被动通道 channel.publish_channel = cd.publish # 子进程发布通道 # 给子进程创建共享内存实例 @@ -102,8 +112,12 @@ class ProcessManager: chan_active = get_channel(f"{name}-active") def _start_process(): - process = Process(target=self.targets[name][0], args=self.targets[name][1], - kwargs=self.targets[name][2], daemon=True) + process = Process( + target=self.targets[name][0], + args=self.targets[name][1], + kwargs=self.targets[name][2], + daemon=True, + ) self.processes[name] = process process.start() @@ -133,7 +147,9 @@ class ProcessManager: for name in self.targets: logger.debug(f"Starting process {name}") - threading.Thread(target=self._run_process, args=(name, ), daemon=True).start() + threading.Thread( + target=self._run_process, args=(name,), daemon=True + ).start() def add_target(self, name: str, target: TARGET_FUNC, args: tuple = (), kwargs=None): """ @@ -154,10 +170,14 @@ class ProcessManager: passive=chan_passive, channel_deliver_active=channel_deliver_active_channel, channel_deliver_passive=channel_deliver_passive_channel, - publish=publish_channel + publish=publish_channel, ) - self.targets[name] = (_delivery_channel_wrapper, (target, channel_deliver, shared_memory, *args), kwargs) + self.targets[name] = ( + _delivery_channel_wrapper, + (target, channel_deliver, shared_memory, *args), + kwargs, + ) # 主进程通道 def join_all(self): @@ -199,3 +219,54 @@ class ProcessManager: if name not in self.targets: logger.warning(f"Process {name} not found.") return self.processes[name].is_alive() + + +# new version + + +class _SubProcessManager: + + def __init__(self): + self.processes: dict[str, SubProcess] = {} + + def new_process( + self, name: str, *args, **kwargs + ) -> Callable[[ProcessFuncType], None]: + def decorator(func: ProcessFuncType): + self.processes[name] = SubProcess(name, func, *args, **kwargs) + + return decorator + + def add(self, name: str, func: ProcessFuncType, *args, **kwargs): + """ + 添加子进程 + Args: + func: 子进程函数 + name: 子进程名称 + args: 子进程函数参数 + kwargs: 子进程函数关键字参数 + Returns: + """ + self.processes[name] = SubProcess(name, func, *args, **kwargs) + + def start(self, name: str): + """ + 启动指定子进程 + Args: + name: 子进程名称 + Returns: + """ + if name not in self.processes: + raise KeyError(f"Process {name} not found.") + self.processes[name].start() + + def start_all(self): + """ + 启动所有子进程 + """ + for name, process in self.processes.items(): + process.start() + logger.debug(f"Starting process {name}") + + +sub_process_manager = _SubProcessManager() diff --git a/liteyuki/plugin/load.py b/liteyuki/plugin/load.py index a18cabe4..7cdeb90d 100644 --- a/liteyuki/plugin/load.py +++ 
b/liteyuki/plugin/load.py @@ -60,7 +60,6 @@ def load_plugin(module_path: str | Path) -> Optional[Plugin]: f"{metadata.name}({module.__name__.split('.')[-1]})", metadata.type ) else: - logger.opt(colors=True).warning( f'The metadata of Liteyuki plugin "{module.__name__}" is not specified, use empty.' ) diff --git a/pdm.lock b/pdm.lock new file mode 100644 index 00000000..d4119848 --- /dev/null +++ b/pdm.lock @@ -0,0 +1,510 @@ +# This file is @generated by PDM. +# It is not intended for manual editing. + +[metadata] +groups = ["default", "dev"] +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:ca1b7f108fb7c5bc51977cf165511cbc7a95f64bc4effd6b6597f60bf893ba77" + +[[metadata.targets]] +requires_python = ">=3.10" + +[[package]] +name = "annotated-types" +version = "0.7.0" +requires_python = ">=3.8" +summary = "Reusable constraint types to use with typing.Annotated" +groups = ["default"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "black" +version = "24.10.0" +requires_python = ">=3.9" +summary = "The uncompromising code formatter." +groups = ["dev"] +dependencies = [ + "click>=8.0.0", + "mypy-extensions>=0.4.3", + "packaging>=22.0", + "pathspec>=0.9.0", + "platformdirs>=2", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.0.1; python_version < \"3.11\"", +] +files = [ + {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, + {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, + {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, + {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, + {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, + {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, + {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, + {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, + {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, + {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, + {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, + {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, + {file = 
"black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, + {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, + {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, + {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, + {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, + {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, +] + +[[package]] +name = "click" +version = "8.1.7" +requires_python = ">=3.7" +summary = "Composable command line interface toolkit" +groups = ["dev"] +dependencies = [ + "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." +groups = ["default", "dev"] +marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "croterline" +version = "1.0.3" +requires_python = ">=3.10" +summary = "Default template for PDM package" +groups = ["default"] +dependencies = [ + "magicoca>=1.0.1", +] +files = [ + {file = "croterline-1.0.3-py3-none-any.whl", hash = "sha256:e934041248bba97382cc522c658d6c4f507dbcfe751e90a1d4cb3076b530e99b"}, + {file = "croterline-1.0.3.tar.gz", hash = "sha256:eb3874a96ed06d98fe210731ad9352a854df81218fb2c25e707e2b641b6daffb"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["dev"] +marker = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +requires_python = ">=3.7" +summary = "brain-dead simple config-ini parsing" +groups = ["dev"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "loguru" +version = "0.7.2" +requires_python = ">=3.5" +summary = "Python logging made (stupidly) simple" +groups = ["default"] +dependencies = [ + "aiocontextvars>=0.2.0; python_version < \"3.7\"", + "colorama>=0.3.4; sys_platform == \"win32\"", + 
"win32-setctime>=1.0.0; sys_platform == \"win32\"", +] +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, +] + +[[package]] +name = "magicoca" +version = "1.0.1" +requires_python = ">=3.10" +summary = "A communication library for Python" +groups = ["default"] +files = [ + {file = "magicoca-1.0.1-py3-none-any.whl", hash = "sha256:69e04be77f9c02d3d0730dc4e739246f4bdefee8b78631040b464cd98cdde51c"}, + {file = "magicoca-1.0.1.tar.gz", hash = "sha256:0dbc9a35609db92ec79076f7126566c1e71bd4b853909ecbad9221dcc7fd6f31"}, +] + +[[package]] +name = "mypy" +version = "1.11.2" +requires_python = ">=3.8" +summary = "Optional static typing for Python" +groups = ["dev"] +dependencies = [ + "mypy-extensions>=1.0.0", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, +] + +[[package]] +name = "mypy-extensions" +version = 
"1.0.0" +requires_python = ">=3.5" +summary = "Type system extensions for programs checked with the mypy type checker." +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "24.1" +requires_python = ">=3.8" +summary = "Core utilities for Python packages" +groups = ["dev"] +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +requires_python = ">=3.8" +summary = "Utility library for gitignore style pattern matching of file paths." +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pdm-backend" +version = "2.4.2" +requires_python = ">=3.8" +summary = "The build backend used by PDM that supports latest packaging standards" +groups = ["default"] +dependencies = [ + "importlib-metadata>=3.6; python_version < \"3.10\"", +] +files = [ + {file = "pdm_backend-2.4.2-py3-none-any.whl", hash = "sha256:8537a3273b19d6448eb07a4a1a92dedc0b60935344a037729ada7be33b5f71ad"}, + {file = "pdm_backend-2.4.2.tar.gz", hash = "sha256:1f833e527ae172f34b4b84e2fcf1f65859a2a5ca746e496d8313b3ea6539969f"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +requires_python = ">=3.8" +summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+groups = ["dev"] +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +requires_python = ">=3.8" +summary = "plugin and hook calling mechanisms for python" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[[package]] +name = "pydantic" +version = "2.9.2" +requires_python = ">=3.8" +summary = "Data validation using Python type hints" +groups = ["default"] +dependencies = [ + "annotated-types>=0.6.0", + "pydantic-core==2.23.4", + "typing-extensions>=4.12.2; python_version >= \"3.13\"", + "typing-extensions>=4.6.1; python_version < \"3.13\"", +] +files = [ + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, +] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +requires_python = ">=3.8" +summary = "Core functionality for Pydantic validation and serialization" +groups = ["default"] +dependencies = [ + "typing-extensions!=4.7.0,>=4.6.0", +] +files = [ + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = 
"pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash 
= "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +] + +[[package]] +name = "pytest" +version = "8.3.3" +requires_python = ">=3.8" +summary = "pytest: simple powerful testing with Python" +groups = ["dev"] +dependencies = [ + "colorama; sys_platform == \"win32\"", + "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", + "iniconfig", + "packaging", + "pluggy<2,>=1.5", + "tomli>=1; python_version < \"3.11\"", +] +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +requires_python = ">=3.8" +summary = "YAML parser and emitter for Python" +groups = ["default"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + 
{file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Python Library for Tom's Obvious, Minimal Language" +groups = ["default"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.2" +requires_python = ">=3.8" +summary = "A lil' TOML parser" +groups = ["dev"] +marker = "python_version 
< \"3.11\"" +files = [ + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["default", "dev"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "uv" +version = "0.4.20" +requires_python = ">=3.8" +summary = "An extremely fast Python package and project manager, written in Rust." +groups = ["dev"] +files = [ + {file = "uv-0.4.20-py3-none-linux_armv6l.whl", hash = "sha256:d0566f3ce596b0192099f7a01be08e1f37061d7399e0128804794cf83cdf2806"}, + {file = "uv-0.4.20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1f20251b5a6a1cc92d844153b128b346bd0be8178beb4945df63d1a76a905176"}, + {file = "uv-0.4.20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d37f02ae48540104d9c13d2dfe27bf84b246d5945b55d91568404da08e2a3bd8"}, + {file = "uv-0.4.20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:555f0275c3db5b1cd13f6a6825b0b0f23e116a58a46da65f55d4f07915b36b16"}, + {file = "uv-0.4.20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6faba47d13c1b916bfe9a1828a792ba21558871b4b81dbb79c157077f558fb3"}, + {file = "uv-0.4.20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:092d4d3cee4a9680832c16d5c1a5e816b2d07a31328580f04e4ddf437821b1f3"}, + {file = "uv-0.4.20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5d62655450d173a4dbe76b70b9af81ffa501501d97224f311f126b30924b42f7"}, + {file = "uv-0.4.20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:865c5fbc2ebe73b4f4b71cbcc1b1bae90a335b15f6eaa9fa6495f77a6e86455e"}, + {file = "uv-0.4.20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a65eaec88b084094f5b08c2ad73f0ae972f7d6afd0d3ee1d0eb29a76c010a39b"}, + {file = "uv-0.4.20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e3492d5f1613e88201b6f68a2e5fba48b0bdbe0f11179df9b222e9dd8d89d3"}, + {file = "uv-0.4.20-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:8ec4a7d0ab131ea749702d4885ff0f6734e1aca1dc26ebbc1c7c67969ba3c0fc"}, + {file = "uv-0.4.20-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:588aedc47fe02f8cf0dfe0dec3fd5e1f3a707fdf674964b3d31f0523351db9d2"}, + {file = "uv-0.4.20-py3-none-musllinux_1_1_i686.whl", hash = "sha256:309539e9b29f3fbbedb3835297a324a9206b42005e15b0af3fa73343ab966349"}, + {file = "uv-0.4.20-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad94fb135bec5c061ba21b1f081f349c3de2b0f8660e168e5afc829d3069e6d"}, + {file = "uv-0.4.20-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:74f78748e72893a674351ca9d708003629ddc1a00bc51100c901b5d47db73e43"}, + {file = "uv-0.4.20-py3-none-win32.whl", hash = "sha256:dbf454b6f56f9181886426c7aed7a8dfc8258f80082365fe99b2044ff92261ba"}, + {file = "uv-0.4.20-py3-none-win_amd64.whl", hash = "sha256:653bfec188d199384451804a6c055fb1d28662adfee7697fe7108c6fb78924ba"}, + {file = "uv-0.4.20.tar.gz", hash = "sha256:b4c8a2027b1f19f8b8949132e728a750e4f9b4bb0ec02544d9b21df3f525ab1a"}, +] + 
+[[package]] +name = "watchdog" +version = "5.0.3" +requires_python = ">=3.9" +summary = "Filesystem events monitoring" +groups = ["default"] +files = [ + {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:85527b882f3facda0579bce9d743ff7f10c3e1e0db0a0d0e28170a7d0e5ce2ea"}, + {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53adf73dcdc0ef04f7735066b4a57a4cd3e49ef135daae41d77395f0b5b692cb"}, + {file = "watchdog-5.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e25adddab85f674acac303cf1f5835951345a56c5f7f582987d266679979c75b"}, + {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f01f4a3565a387080dc49bdd1fefe4ecc77f894991b88ef927edbfa45eb10818"}, + {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91b522adc25614cdeaf91f7897800b82c13b4b8ac68a42ca959f992f6990c490"}, + {file = "watchdog-5.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d52db5beb5e476e6853da2e2d24dbbbed6797b449c8bf7ea118a4ee0d2c9040e"}, + {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8"}, + {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926"}, + {file = "watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e"}, + {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7"}, + {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906"}, + {file = "watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1"}, + {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:90a67d7857adb1d985aca232cc9905dd5bc4803ed85cfcdcfcf707e52049eda7"}, + {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:720ef9d3a4f9ca575a780af283c8fd3a0674b307651c1976714745090da5a9e8"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = "sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7"}, + {file = "watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49"}, + {file = "watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9"}, + {file = "watchdog-5.0.3-py3-none-win_ia64.whl", 
hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45"}, + {file = "watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176"}, +] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +requires_python = ">=3.5" +summary = "A small Python utility to set file creation time on Windows" +groups = ["default"] +marker = "sys_platform == \"win32\"" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] diff --git a/pyproject.toml b/pyproject.toml index 56dd0ee7..0e8bc121 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,17 +10,18 @@ readme = "README.md" requires-python = ">=3.10" authors = [ { name = "snowykami", email = "snowykami@outlook.com" }, - { name = "LiteyukiStudio", email = "studio@liteyuki.icu" }, ] license = { text = "MIT&LSO" } dependencies = [ "loguru~=0.7.2", - "pydantic==2.8.2", - "PyYAML==6.0.2", - "toml==0.10.2", - "watchdog==4.0.1", - "pdm-backend==2.3.3" + "pydantic>=2.8.2", + "PyYAML>=6.0.2", + "toml>=0.10.2", + "watchdog>=4.0.1", + "pdm-backend>=2.3.3", + "magicoca>=1.0.1", + "croterline>=1.0.3" ] [project.urls] @@ -38,5 +39,14 @@ includes = ["liteyuki/", "LICENSE", "README.md"] excludes = ["tests/", "docs/", "src/"] [tool.pdm.version] -source = "file" -path = "liteyuki/__init__.py" \ No newline at end of file +source = "scm" +tag_filter = "v*" +tag_regex = '^v(?:\D*)?(?P([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|c|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$)$' + +[tool.pdm.dev-dependencies] +dev = [ + "pytest>=8.3.3", + "black>=24.10.0", + "uv>=0.4.20", + "mypy>=1.11.2", +] diff --git a/requirements.txt b/requirements.txt index 046b3b9c..2bfdb3c4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ aiohttp>=3.9.3 aiofiles>=23.2.1 colored>=2.2.4 +croterline>=1.0.3 GitPython>=3.1.43 httpx>=0.27.0 nonebot-plugin-htmlrender>=0.1.0 @@ -25,4 +26,9 @@ toml>=0.10.2 importlib_metadata>=7.0.2 watchdog>=4.0.0 jieba>=0.42.1 -python-dotenv>=1.0.1 \ No newline at end of file +python-dotenv>=1.0.1 +loguru~=0.7.2 +pydantic~=2.9.2 +pip~=23.2.1 +fastapi~=0.115.0 +magicoca~=1.0.1 \ No newline at end of file diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/__init__.py b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/__init__.py deleted file mode 100644 index a0c6addd..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Copyright (C) 2020-2024 LiteyukiStudio. 
All Rights Reserved - -@Time : 2024/8/11 下午5:24 -@Author : snowykami -@Email : snowykami@outlook.com -@File : __init__.py.py -@Software: PyCharm -""" - -import nonebot -from liteyuki.utils import IS_MAIN_PROCESS -from liteyuki.plugin import PluginMetadata, PluginType -from .nb_utils import adapter_manager, driver_manager # type: ignore -from liteyuki.log import logger - -__plugin_meta__ = PluginMetadata( - name="NoneBot2启动器", - type=PluginType.APPLICATION, -) - - -def nb_run(*args, **kwargs): - """ - 初始化NoneBot并运行在子进程 - Args: - **kwargs: - - Returns: - """ - # 给子进程传递通道对象 - kwargs.update(kwargs.get("nonebot", {})) # nonebot配置优先 - nonebot.init(**kwargs) - - driver_manager.init(config=kwargs) - adapter_manager.init(kwargs) - adapter_manager.register() - - try: - # nonebot.load_plugin("nonebot-plugin-lnpm") # 尝试加载轻雪NoneBot插件加载器(Nonebot插件) - nonebot.load_plugin("src.liteyuki_main") # 尝试加载轻雪主插件(Nonebot插件) - except Exception as e: - pass - nonebot.run() - - -if IS_MAIN_PROCESS: - from liteyuki import get_bot - from .dev_reloader import * - - liteyuki = get_bot() - liteyuki.process_manager.add_target(name="nonebot", target=nb_run, args=(), kwargs=liteyuki.config) diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/dev_reloader.py b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/dev_reloader.py deleted file mode 100644 index e299c16b..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/dev_reloader.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -""" -NoneBot 开发环境重载监视器 -""" -import os.path - -from liteyuki.dev import observer -from liteyuki import get_bot, logger -from liteyuki.utils import IS_MAIN_PROCESS -from watchdog.events import FileSystemEvent - - -liteyuki = get_bot() - -exclude_extensions = (".pyc", ".pyo") - - -@observer.on_file_system_event( - directories=("src/nonebot_plugins",), - event_filter=lambda event: not event.src_path.endswith(exclude_extensions) and ("__pycache__" not in event.src_path ) and os.path.isfile(event.src_path) -) -def restart_nonebot_process(event: FileSystemEvent): - logger.debug(f"File {event.src_path} changed, reloading nonebot...") - liteyuki.restart_process("nonebot") \ No newline at end of file diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/__init__.py b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/__init__.py deleted file mode 100644 index daf48023..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -from . 
import ( - satori, - onebot -) - - -def init(config: dict): - onebot.init() - satori.init(config) - - -def register(): - onebot.register() - satori.register() diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/onebot.py b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/onebot.py deleted file mode 100644 index 23d2a59c..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/onebot.py +++ /dev/null @@ -1,12 +0,0 @@ -import nonebot -from nonebot.adapters.onebot import v11, v12 - - -def init(): - pass - - -def register(): - driver = nonebot.get_driver() - driver.register_adapter(v11.Adapter) - driver.register_adapter(v12.Adapter) diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/satori.py b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/satori.py deleted file mode 100644 index 7778296d..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/adapter_manager/satori.py +++ /dev/null @@ -1,26 +0,0 @@ -import json -import os - -import nonebot -from nonebot.adapters import satori - - -def init(config: dict): - if config.get("satori", None) is None: - nonebot.logger.info("Satori config not found, skip Satori init.") - return None - satori_config = config.get("satori") - if not satori_config.get("enable", False): - nonebot.logger.info("Satori not enabled, skip Satori init.") - return None - if os.getenv("SATORI_CLIENTS", None) is not None: - nonebot.logger.info("Satori clients already set in environment variable, skip.") - os.environ["SATORI_CLIENTS"] = json.dumps(satori_config.get("hosts", []), ensure_ascii=False) - config['satori_clients'] = satori_config.get("hosts", []) - return - - -def register(): - if os.getenv("SATORI_CLIENTS", None) is not None: - driver = nonebot.get_driver() - driver.register_adapter(satori.Adapter) diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/__init__.py b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/__init__.py deleted file mode 100644 index 89d90097..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .auto_set_env import auto_set_env - - -def init(config: dict): - auto_set_env(config) - return diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/auto_set_env.py b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/auto_set_env.py deleted file mode 100644 index 506ca074..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/auto_set_env.py +++ /dev/null @@ -1,20 +0,0 @@ -import os - -import dotenv -import nonebot - -from .defines import * - - -def auto_set_env(config: dict): - dotenv.load_dotenv(".env") - if os.getenv("DRIVER", None) is not None: - nonebot.logger.info("Driver already set in environment variable, skip auto configure.") - return - if config.get("satori", {'enable': False}).get("enable", False): - os.environ["DRIVER"] = get_driver_string(ASGI_DRIVER, HTTPX_DRIVER, WEBSOCKETS_DRIVER) - nonebot.logger.info("Enable Satori, set driver to ASGI+HTTPX+WEBSOCKETS") - else: - os.environ["DRIVER"] = get_driver_string(ASGI_DRIVER) - nonebot.logger.info("Disable Satori, set driver to ASGI") - return diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/defines.py 
b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/defines.py deleted file mode 100644 index b4756bf5..00000000 --- a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/nb_utils/driver_manager/defines.py +++ /dev/null @@ -1,17 +0,0 @@ -ASGI_DRIVER = "~fastapi" -HTTPX_DRIVER = "~httpx" -WEBSOCKETS_DRIVER = "~websockets" - - -def get_driver_string(*argv): - output_string = "" - if ASGI_DRIVER in argv: - output_string += ASGI_DRIVER - for arg in argv: - if arg != ASGI_DRIVER: - output_string = f"{output_string}+{arg}" - return output_string - - -def get_driver_full_string(*argv): - return f"DRIVER={get_driver_string(argv)}" diff --git a/src/liteyuki_plugins/liteyukibot_plugin_nonebot/py.typed b/src/liteyuki_plugins/liteyukibot_plugin_nonebot/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/src/liteyuki_plugins/nonebot/__init__.py b/src/liteyuki_plugins/nonebot/__init__.py new file mode 100644 index 00000000..87711ab2 --- /dev/null +++ b/src/liteyuki_plugins/nonebot/__init__.py @@ -0,0 +1,27 @@ +import os.path +from pathlib import Path + +import nonebot +from croterline.utils import IsMainProcess + +from liteyuki import get_bot +from liteyuki.core import sub_process_manager +from liteyuki.plugin import PluginMetadata, PluginType + +__plugin_meta__ = PluginMetadata( + name="NoneBot2启动器", + type=PluginType.APPLICATION, +) + + +def nb_run(*args, **kwargs): + nonebot.init(**kwargs) + nonebot.load_plugin(Path(os.path.dirname(__file__)) / "np_main") + nonebot.run() + + +if IsMainProcess: + bot = get_bot() + sub_process_manager.add( + name="nonebot", func=nb_run, **bot.config.get("nonebot", {}) + ) diff --git a/src/nonebot_plugins/liteyuki_mctools/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_mctools/__init__.py similarity index 100% rename from src/nonebot_plugins/liteyuki_mctools/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_mctools/__init__.py diff --git a/src/nonebot_plugins/liteyuki_pacman/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/__init__.py similarity index 100% rename from src/nonebot_plugins/liteyuki_pacman/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/__init__.py diff --git a/src/nonebot_plugins/liteyuki_pacman/common.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/common.py similarity index 100% rename from src/nonebot_plugins/liteyuki_pacman/common.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/common.py diff --git a/src/nonebot_plugins/liteyuki_pacman/npm.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/npm.py similarity index 100% rename from src/nonebot_plugins/liteyuki_pacman/npm.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/npm.py diff --git a/src/nonebot_plugins/liteyuki_pacman/rpm.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/rpm.py similarity index 100% rename from src/nonebot_plugins/liteyuki_pacman/rpm.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_pacman/rpm.py diff --git a/src/nonebot_plugins/liteyuki_smart_reply/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/__init__.py similarity index 100% rename from src/nonebot_plugins/liteyuki_smart_reply/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/__init__.py diff --git a/src/nonebot_plugins/liteyuki_smart_reply/matchers.py 
b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/matchers.py similarity index 100% rename from src/nonebot_plugins/liteyuki_smart_reply/matchers.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/matchers.py diff --git a/src/nonebot_plugins/liteyuki_smart_reply/monitors.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/monitors.py similarity index 100% rename from src/nonebot_plugins/liteyuki_smart_reply/monitors.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/monitors.py diff --git a/src/nonebot_plugins/liteyuki_smart_reply/utils.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/utils.py similarity index 100% rename from src/nonebot_plugins/liteyuki_smart_reply/utils.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_smart_reply/utils.py diff --git a/src/nonebot_plugins/liteyuki_statistics/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/__init__.py similarity index 100% rename from src/nonebot_plugins/liteyuki_statistics/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/__init__.py diff --git a/src/nonebot_plugins/liteyuki_statistics/common.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/common.py similarity index 100% rename from src/nonebot_plugins/liteyuki_statistics/common.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/common.py diff --git a/src/nonebot_plugins/liteyuki_statistics/data_source.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/data_source.py similarity index 98% rename from src/nonebot_plugins/liteyuki_statistics/data_source.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/data_source.py index 017d6605..eef5e33e 100644 --- a/src/nonebot_plugins/liteyuki_statistics/data_source.py +++ b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/data_source.py @@ -10,7 +10,7 @@ from .common import MessageEventModel, msg_db from src.utils.base.language import Language from src.utils.base.resource import get_path from src.utils.message.string_tool import convert_seconds_to_time -from ...utils.external.logo import get_group_icon, get_user_icon +from src.utils.external.logo import get_group_icon, get_user_icon async def count_msg_by_bot_id(bot_id: str) -> int: diff --git a/src/nonebot_plugins/liteyuki_statistics/stat_matchers.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/stat_matchers.py similarity index 100% rename from src/nonebot_plugins/liteyuki_statistics/stat_matchers.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/stat_matchers.py diff --git a/src/nonebot_plugins/liteyuki_statistics/stat_monitors.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/stat_monitors.py similarity index 100% rename from src/nonebot_plugins/liteyuki_statistics/stat_monitors.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/stat_monitors.py diff --git a/src/nonebot_plugins/liteyuki_statistics/stat_restful_api.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/stat_restful_api.py similarity index 100% rename from src/nonebot_plugins/liteyuki_statistics/stat_restful_api.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/stat_restful_api.py diff --git a/src/nonebot_plugins/liteyuki_statistics/word_cloud/LICENSE 
b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/word_cloud/LICENSE similarity index 100% rename from src/nonebot_plugins/liteyuki_statistics/word_cloud/LICENSE rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/word_cloud/LICENSE diff --git a/src/nonebot_plugins/liteyuki_statistics/word_cloud/data_source.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/word_cloud/data_source.py similarity index 100% rename from src/nonebot_plugins/liteyuki_statistics/word_cloud/data_source.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_statistics/word_cloud/data_source.py diff --git a/src/nonebot_plugins/liteyuki_status/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/__init__.py similarity index 100% rename from src/nonebot_plugins/liteyuki_status/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/__init__.py diff --git a/src/nonebot_plugins/liteyuki_status/api.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/api.py similarity index 100% rename from src/nonebot_plugins/liteyuki_status/api.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/api.py diff --git a/src/nonebot_plugins/liteyuki_status/counter_for_satori.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/counter_for_satori.py similarity index 100% rename from src/nonebot_plugins/liteyuki_status/counter_for_satori.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/counter_for_satori.py diff --git a/src/nonebot_plugins/liteyuki_status/status.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/status.py similarity index 100% rename from src/nonebot_plugins/liteyuki_status/status.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_status/status.py diff --git a/src/nonebot_plugins/liteyuki_uniblacklist/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_uniblacklist/__init__.py similarity index 100% rename from src/nonebot_plugins/liteyuki_uniblacklist/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_uniblacklist/__init__.py diff --git a/src/nonebot_plugins/liteyuki_uniblacklist/api.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_uniblacklist/api.py similarity index 100% rename from src/nonebot_plugins/liteyuki_uniblacklist/api.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_uniblacklist/api.py diff --git a/src/nonebot_plugins/liteyuki_user/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/__init__.py similarity index 100% rename from src/nonebot_plugins/liteyuki_user/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/__init__.py diff --git a/src/nonebot_plugins/liteyuki_user/const.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/const.py similarity index 100% rename from src/nonebot_plugins/liteyuki_user/const.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/const.py diff --git a/src/nonebot_plugins/liteyuki_user/input_handle.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/input_handle.py similarity index 100% rename from src/nonebot_plugins/liteyuki_user/input_handle.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/input_handle.py diff --git a/src/nonebot_plugins/liteyuki_user/profile_manager.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/profile_manager.py similarity index 100% rename from 
src/nonebot_plugins/liteyuki_user/profile_manager.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_user/profile_manager.py diff --git a/src/nonebot_plugins/liteyuki_weather/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/__init__.py similarity index 91% rename from src/nonebot_plugins/liteyuki_weather/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/__init__.py index 2127a6ac..90212c4a 100644 --- a/src/nonebot_plugins/liteyuki_weather/__init__.py +++ b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/__init__.py @@ -15,7 +15,7 @@ __plugin_meta__ = PluginMetadata( } ) -from ...utils.base.data_manager import set_memory_data +from src.utils.base.data_manager import set_memory_data driver = get_driver() diff --git a/src/nonebot_plugins/liteyuki_weather/qw_api.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/qw_api.py similarity index 98% rename from src/nonebot_plugins/liteyuki_weather/qw_api.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/qw_api.py index 8c641fd4..54ff4b35 100644 --- a/src/nonebot_plugins/liteyuki_weather/qw_api.py +++ b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/qw_api.py @@ -3,8 +3,8 @@ import aiohttp from .qw_models import * import httpx -from ...utils.base.data_manager import get_memory_data -from ...utils.base.language import Language +from src.utils.base.data_manager import get_memory_data +from src.utils.base.language import Language dev_url = "https://devapi.qweather.com/" # 开发HBa com_url = "https://api.qweather.com/" # 正式环境 diff --git a/src/nonebot_plugins/liteyuki_weather/qw_models.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/qw_models.py similarity index 100% rename from src/nonebot_plugins/liteyuki_weather/qw_models.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/qw_models.py diff --git a/src/nonebot_plugins/liteyuki_weather/qweather.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/qweather.py similarity index 100% rename from src/nonebot_plugins/liteyuki_weather/qweather.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/liteyuki_weather/qweather.py diff --git a/src/nonebot_plugins/packmanv2/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/__init__.py similarity index 100% rename from src/nonebot_plugins/packmanv2/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/__init__.py diff --git a/src/nonebot_plugins/packmanv2/handle.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/handle.py similarity index 100% rename from src/nonebot_plugins/packmanv2/handle.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/handle.py diff --git a/src/nonebot_plugins/packmanv2/npm/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/npm/__init__.py similarity index 100% rename from src/nonebot_plugins/packmanv2/npm/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/npm/__init__.py diff --git a/src/nonebot_plugins/packmanv2/npm/data_source.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/npm/data_source.py similarity index 100% rename from src/nonebot_plugins/packmanv2/npm/data_source.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/npm/data_source.py diff --git a/src/nonebot_plugins/packmanv2/rpm/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/rpm/__init__.py similarity index 100% rename from 
src/nonebot_plugins/packmanv2/rpm/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/packmanv2/rpm/__init__.py diff --git a/src/nonebot_plugins/to_liteyuki.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/to_liteyuki.py similarity index 100% rename from src/nonebot_plugins/to_liteyuki.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/to_liteyuki.py diff --git a/src/nonebot_plugins/webdash/__init__.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/__init__.py similarity index 100% rename from src/nonebot_plugins/webdash/__init__.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/__init__.py diff --git a/src/nonebot_plugins/webdash/common.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/common.py similarity index 100% rename from src/nonebot_plugins/webdash/common.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/common.py diff --git a/src/nonebot_plugins/webdash/main.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/main.py similarity index 100% rename from src/nonebot_plugins/webdash/main.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/main.py diff --git a/src/nonebot_plugins/webdash/restful_api.py b/src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/restful_api.py similarity index 100% rename from src/nonebot_plugins/webdash/restful_api.py rename to src/liteyuki_plugins/nonebot/nonebot_plugins/webdash/restful_api.py diff --git a/src/liteyuki_main/__init__.py b/src/liteyuki_plugins/nonebot/np_main/__init__.py similarity index 86% rename from src/liteyuki_main/__init__.py rename to src/liteyuki_plugins/nonebot/np_main/__init__.py index 972a3f2b..91778719 100644 --- a/src/liteyuki_main/__init__.py +++ b/src/liteyuki_plugins/nonebot/np_main/__init__.py @@ -1,20 +1,20 @@ -from nonebot.plugin import PluginMetadata - -from .core import * -from .loader import * -__author__ = "snowykami" -__plugin_meta__ = PluginMetadata( - name="轻雪核心插件", - description="轻雪主程序插件,包含了许多初始化的功能", - usage="", - homepage="https://github.com/snowykami/LiteyukiBot", - extra={ - "liteyuki" : True, - "toggleable": False, - } -) - -from ..utils.base.language import Language, get_default_lang_code - -sys_lang = Language(get_default_lang_code()) +from nonebot.plugin import PluginMetadata + +from .core import * +from .loader import * +__author__ = "snowykami" +__plugin_meta__ = PluginMetadata( + name="轻雪核心插件", + description="轻雪主程序插件,包含了许多初始化的功能", + usage="", + homepage="https://github.com/snowykami/LiteyukiBot", + extra={ + "liteyuki" : True, + "toggleable": False, + } +) + +from src.utils.base.language import Language, get_default_lang_code + +sys_lang = Language(get_default_lang_code()) nonebot.logger.info(sys_lang.get("main.current_language", LANG=sys_lang.get("language.name"))) \ No newline at end of file diff --git a/src/liteyuki_main/api.py b/src/liteyuki_plugins/nonebot/np_main/api.py similarity index 96% rename from src/liteyuki_main/api.py rename to src/liteyuki_plugins/nonebot/np_main/api.py index 56fa7864..fb9657a2 100644 --- a/src/liteyuki_main/api.py +++ b/src/liteyuki_plugins/nonebot/np_main/api.py @@ -1,47 +1,47 @@ -import nonebot -from git import Repo - -from src.utils.base.config import get_config - -remote_urls = [ - "https://github.com/LiteyukiStudio/LiteyukiBot.git", - "https://gitee.com/snowykami/LiteyukiBot.git" -] - - -def detect_update() -> bool: - # 对每个远程仓库进行检查,只要有一个仓库有更新,就返回True - for remote_url in remote_urls: - repo = Repo(".") - repo.remotes.origin.set_url(remote_url) - repo.remotes.origin.fetch() - 
if repo.head.commit != repo.commit('origin/main'): - return True - - -def update_liteyuki() -> tuple[bool, str]: - """更新轻雪 - :return: 是否更新成功,更新变动""" - - if get_config("allow_update", True): - new_commit_detected = detect_update() - if new_commit_detected: - repo = Repo(".") - logs = "" - # 对每个远程仓库进行更新 - for remote_url in remote_urls: - try: - logs += f"\nremote: {remote_url}" - repo.remotes.origin.set_url(remote_url) - repo.remotes.origin.pull() - diffs = repo.head.commit.diff("origin/main") - for diff in diffs.iter_change_type('M'): - logs += f"\n{diff.a_path}" - return True, logs - except: - continue - else: - return False, "Nothing Changed" - - else: - raise PermissionError("Update is not allowed.") +import nonebot +from git import Repo + +from src.utils.base.config import get_config + +remote_urls = [ + "https://github.com/LiteyukiStudio/LiteyukiBot.git", + "https://gitee.com/snowykami/LiteyukiBot.git" +] + + +def detect_update() -> bool: + # 对每个远程仓库进行检查,只要有一个仓库有更新,就返回True + for remote_url in remote_urls: + repo = Repo(".") + repo.remotes.origin.set_url(remote_url) + repo.remotes.origin.fetch() + if repo.head.commit != repo.commit('origin/main'): + return True + + +def update_liteyuki() -> tuple[bool, str]: + """更新轻雪 + :return: 是否更新成功,更新变动""" + + if get_config("allow_update", True): + new_commit_detected = detect_update() + if new_commit_detected: + repo = Repo(".") + logs = "" + # 对每个远程仓库进行更新 + for remote_url in remote_urls: + try: + logs += f"\nremote: {remote_url}" + repo.remotes.origin.set_url(remote_url) + repo.remotes.origin.pull() + diffs = repo.head.commit.diff("origin/main") + for diff in diffs.iter_change_type('M'): + logs += f"\n{diff.a_path}" + return True, logs + except: + continue + else: + return False, "Nothing Changed" + + else: + raise PermissionError("Update is not allowed.") diff --git a/src/liteyuki_main/core.py b/src/liteyuki_plugins/nonebot/np_main/core.py similarity index 95% rename from src/liteyuki_main/core.py rename to src/liteyuki_plugins/nonebot/np_main/core.py index 8af6fa21..a576e339 100644 --- a/src/liteyuki_main/core.py +++ b/src/liteyuki_plugins/nonebot/np_main/core.py @@ -1,301 +1,301 @@ -import time -from typing import AnyStr - -import time -from typing import AnyStr - -import nonebot -import pip -from nonebot import get_driver, require -from nonebot.adapters import onebot, satori -from nonebot.adapters.onebot.v11 import Message, unescape -from nonebot.internal.matcher import Matcher -from nonebot.permission import SUPERUSER - -# from src.liteyuki.core import Reloader -from src.utils import event as event_utils, satori_utils -from src.utils.base.config import get_config -from src.utils.base.data_manager import TempConfig, common_db -from src.utils.base.language import get_user_lang -from src.utils.base.ly_typing import T_Bot, T_MessageEvent -from src.utils.message.message import MarkdownMessage as md, broadcast_to_superusers -from .api import update_liteyuki # type: ignore -from ..utils.base import reload # type: ignore -from ..utils.base.ly_function import get_function # type: ignore -from ..utils.message.html_tool import md_to_pic - -require("nonebot_plugin_alconna") -require("nonebot_plugin_apscheduler") -from nonebot_plugin_alconna import UniMessage, on_alconna, Alconna, Args, Arparma, MultiVar -from nonebot_plugin_apscheduler import scheduler - - -driver = get_driver() - - -@on_alconna( - command=Alconna( - "liteecho", - Args["text", str, ""], - ), - permission=SUPERUSER -).handle() -# Satori OK -async def _(bot: T_Bot, matcher: Matcher, 
result: Arparma): - if text := result.main_args.get("text"): - await matcher.finish(Message(unescape(text))) - else: - await matcher.finish(f"Hello, Liteyuki!\nBot {bot.self_id}") - - -@on_alconna( - aliases={"更新轻雪"}, - command=Alconna( - "update-liteyuki" - ), - permission=SUPERUSER -).handle() -# Satori OK -async def _(bot: T_Bot, event: T_MessageEvent, matcher: Matcher): - # 使用git pull更新 - - ulang = get_user_lang(str(event.user.id if isinstance(event, satori.event.Event) else event.user_id)) - success, logs = update_liteyuki() - reply = "Liteyuki updated!\n" - reply += f"```\n{logs}\n```\n" - btn_restart = md.btn_cmd(ulang.get("liteyuki.restart_now"), "reload-liteyuki") - pip.main(["install", "-r", "requirements.txt"]) - reply += f"{ulang.get('liteyuki.update_restart', RESTART=btn_restart)}" - # await md.send_md(reply, bot) - img_bytes = await md_to_pic(reply) - await UniMessage.send(UniMessage.image(raw=img_bytes)) - - -@on_alconna( - aliases={"重启轻雪"}, - command=Alconna( - "reload-liteyuki" - ), - permission=SUPERUSER -).handle() -# Satori OK -async def _(matcher: Matcher, bot: T_Bot, event: T_MessageEvent): - await matcher.send("Liteyuki reloading") - temp_data = common_db.where_one(TempConfig(), default=TempConfig()) - - temp_data.data.update( - { - "reload" : True, - "reload_time" : time.time(), - "reload_bot_id" : bot.self_id, - "reload_session_type": event_utils.get_message_type(event), - "reload_session_id" : (event.group_id if event.message_type == "group" else event.user_id) - if not isinstance(event, satori.event.Event) else event.chan_active.id, - "delta_time" : 0 - } - ) - - common_db.save(temp_data) - reload() - - -@on_alconna( - command=Alconna( - "liteyuki-docs", - ), - aliases={"轻雪文档"}, -).handle() -# Satori OK -async def _(matcher: Matcher): - await matcher.finish("https://bot.liteyuki.icu/") - - -@on_alconna( - command=Alconna( - "/function", - Args["function", str]["args", MultiVar(str), ()], - ), - permission=SUPERUSER -).handle() -async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher): - """ - 调用轻雪函数 - Args: - result: - bot: - event: - - Returns: - - """ - function_name = result.main_args.get("function") - args: tuple[str] = result.main_args.get("args", ()) - _args = [] - _kwargs = { - "USER_ID" : str(event.user_id), - "GROUP_ID": str(event.group_id) if event.message_type == "group" else "0", - "BOT_ID" : str(bot.self_id) - } - - for arg in args: - arg = arg.replace("\\=", "EQUAL_SIGN") - if "=" in arg: - key, value = arg.split("=", 1) - value = unescape(value.replace("EQUAL_SIGN", "=")) - try: - value = eval(value) - except: - value = value - _kwargs[key] = value - else: - _args.append(arg.replace("EQUAL_SIGN", "=")) - - ly_func = get_function(function_name) - ly_func.bot = bot if "BOT_ID" not in _kwargs else nonebot.get_bot(_kwargs["BOT_ID"]) - ly_func.matcher = matcher - - await ly_func(*tuple(_args), **_kwargs) - - -@on_alconna( - command=Alconna( - "/api", - Args["api", str]["args", MultiVar(AnyStr), ()], - ), - permission=SUPERUSER -).handle() -async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher): - """ - 调用API - Args: - result: - bot: - event: - - Returns: - - """ - api_name = result.main_args.get("api") - args: tuple[str] = result.main_args.get("args", ()) # 类似于url参数,但每个参数间用空格分隔,空格是%20 - args_dict = {} - - for arg in args: - key, value = arg.split("=", 1) - - args_dict[key] = unescape(value.replace("%20", " ")) - - if api_name in need_user_id and "user_id" not in args_dict: - args_dict["user_id"] = 
str(event.user_id) - if api_name in need_group_id and "group_id" not in args_dict and event.message_type == "group": - args_dict["group_id"] = str(event.group_id) - - if "message" in args_dict: - args_dict["message"] = Message(eval(args_dict["message"])) - - if "messages" in args_dict: - args_dict["messages"] = Message(eval(args_dict["messages"])) - - try: - result = await bot.call_api(api_name, **args_dict) - except Exception as e: - result = str(e) - - args_show = "\n".join("- %s: %s" % (k, v) for k, v in args_dict.items()) - await matcher.finish(f"API: {api_name}\n\nArgs: \n{args_show}\n\nResult: {result}") - - -@driver.on_startup -async def on_startup(): - temp_data = common_db.where_one(TempConfig(), default=TempConfig()) - # 储存重启信息 - if temp_data.data.get("reload", False): - delta_time = time.time() - temp_data.data.get("reload_time", 0) - temp_data.data["delta_time"] = delta_time - common_db.save(temp_data) # 更新数据 - """ - 该部分将迁移至轻雪生命周期 - Returns: - - """ - - -@driver.on_shutdown -async def on_shutdown(): - pass - - -@driver.on_bot_connect -async def _(bot: T_Bot): - temp_data = common_db.where_one(TempConfig(), default=TempConfig()) - if isinstance(bot, satori.Bot): - await satori_utils.user_infos.load_friends(bot) - # 用于重启计时 - if temp_data.data.get("reload", False): - temp_data.data["reload"] = False - reload_bot_id = temp_data.data.get("reload_bot_id", 0) - if reload_bot_id != bot.self_id: - return - reload_session_type = temp_data.data.get("reload_session_type", "private") - reload_session_id = temp_data.data.get("reload_session_id", 0) - delta_time = temp_data.data.get("delta_time", 0) - common_db.save(temp_data) # 更新数据 - - if delta_time <= 20.0: # 启动时间太长就别发了,丢人 - if isinstance(bot, satori.Bot): - await bot.send_message( - channel_id=reload_session_id, - message="Liteyuki reloaded in %.2f s" % delta_time - ) - elif isinstance(bot, onebot.v11.Bot): - await bot.send_msg( - message_type=reload_session_type, - user_id=reload_session_id, - group_id=reload_session_id, - message="Liteyuki reloaded in %.2f s" % delta_time - ) - - elif isinstance(bot, onebot.v12.Bot): - await bot.send_message( - message_type=reload_session_type, - user_id=reload_session_id, - group_id=reload_session_id, - message="Liteyuki reloaded in %.2f s" % delta_time, - detail_type="group" - ) - - -# 每天4点更新 -@scheduler.scheduled_job("cron", hour=4) -async def every_day_update(): - if get_config("auto_update", default=True): - result, logs = update_liteyuki() - pip.main(["install", "-r", "requirements.txt"]) - if result: - await broadcast_to_superusers(f"Liteyuki updated: ```\n{logs}\n```") - nonebot.logger.info(f"Liteyuki updated: {logs}") - reload() - else: - nonebot.logger.info(logs) - - -# 需要用户id的api -need_user_id = ( - "send_private_msg", - "send_msg", - "set_group_card", - "set_group_special_title", - "get_stranger_info", - "get_group_member_info" -) - -need_group_id = ( - "send_group_msg", - "send_msg", - "set_group_card", - "set_group_name", - - "set_group_special_title", - "get_group_member_info", - "get_group_member_list", - "get_group_honor_info" -) +import time +from typing import AnyStr + +import time +from typing import AnyStr + +import nonebot +import pip +from nonebot import get_driver, require +from nonebot.adapters import onebot, satori +from nonebot.adapters.onebot.v11 import Message, unescape +from nonebot.internal.matcher import Matcher +from nonebot.permission import SUPERUSER + +# from src.liteyuki.core import Reloader +from src.utils import event as event_utils, satori_utils +from 
src.utils.base.config import get_config +from src.utils.base.data_manager import TempConfig, common_db +from src.utils.base.language import get_user_lang +from src.utils.base.ly_typing import T_Bot, T_MessageEvent +from src.utils.message.message import MarkdownMessage as md, broadcast_to_superusers +from .api import update_liteyuki # type: ignore +from src.utils.base import reload # type: ignore +from src.utils.base.ly_function import get_function # type: ignore +from src.utils.message.html_tool import md_to_pic + +require("nonebot_plugin_alconna") +require("nonebot_plugin_apscheduler") +from nonebot_plugin_alconna import UniMessage, on_alconna, Alconna, Args, Arparma, MultiVar +from nonebot_plugin_apscheduler import scheduler + + +driver = get_driver() + + +@on_alconna( + command=Alconna( + "liteecho", + Args["text", str, ""], + ), + permission=SUPERUSER +).handle() +# Satori OK +async def _(bot: T_Bot, matcher: Matcher, result: Arparma): + if text := result.main_args.get("text"): + await matcher.finish(Message(unescape(text))) + else: + await matcher.finish(f"Hello, Liteyuki!\nBot {bot.self_id}") + + +@on_alconna( + aliases={"更新轻雪"}, + command=Alconna( + "update-liteyuki" + ), + permission=SUPERUSER +).handle() +# Satori OK +async def _(bot: T_Bot, event: T_MessageEvent, matcher: Matcher): + # 使用git pull更新 + + ulang = get_user_lang(str(event.user.id if isinstance(event, satori.event.Event) else event.user_id)) + success, logs = update_liteyuki() + reply = "Liteyuki updated!\n" + reply += f"```\n{logs}\n```\n" + btn_restart = md.btn_cmd(ulang.get("liteyuki.restart_now"), "reload-liteyuki") + pip.main(["install", "-r", "requirements.txt"]) + reply += f"{ulang.get('liteyuki.update_restart', RESTART=btn_restart)}" + # await md.send_md(reply, bot) + img_bytes = await md_to_pic(reply) + await UniMessage.send(UniMessage.image(raw=img_bytes)) + + +@on_alconna( + aliases={"重启轻雪"}, + command=Alconna( + "reload-liteyuki" + ), + permission=SUPERUSER +).handle() +# Satori OK +async def _(matcher: Matcher, bot: T_Bot, event: T_MessageEvent): + await matcher.send("Liteyuki reloading") + temp_data = common_db.where_one(TempConfig(), default=TempConfig()) + + temp_data.data.update( + { + "reload" : True, + "reload_time" : time.time(), + "reload_bot_id" : bot.self_id, + "reload_session_type": event_utils.get_message_type(event), + "reload_session_id" : (event.group_id if event.message_type == "group" else event.user_id) + if not isinstance(event, satori.event.Event) else event.chan_active.id, + "delta_time" : 0 + } + ) + + common_db.save(temp_data) + reload() + + +@on_alconna( + command=Alconna( + "liteyuki-docs", + ), + aliases={"轻雪文档"}, +).handle() +# Satori OK +async def _(matcher: Matcher): + await matcher.finish("https://bot.liteyuki.icu/") + + +@on_alconna( + command=Alconna( + "/function", + Args["function", str]["args", MultiVar(str), ()], + ), + permission=SUPERUSER +).handle() +async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher): + """ + 调用轻雪函数 + Args: + result: + bot: + event: + + Returns: + + """ + function_name = result.main_args.get("function") + args: tuple[str] = result.main_args.get("args", ()) + _args = [] + _kwargs = { + "USER_ID" : str(event.user_id), + "GROUP_ID": str(event.group_id) if event.message_type == "group" else "0", + "BOT_ID" : str(bot.self_id) + } + + for arg in args: + arg = arg.replace("\\=", "EQUAL_SIGN") + if "=" in arg: + key, value = arg.split("=", 1) + value = unescape(value.replace("EQUAL_SIGN", "=")) + try: + value = eval(value) + 
except: + value = value + _kwargs[key] = value + else: + _args.append(arg.replace("EQUAL_SIGN", "=")) + + ly_func = get_function(function_name) + ly_func.bot = bot if "BOT_ID" not in _kwargs else nonebot.get_bot(_kwargs["BOT_ID"]) + ly_func.matcher = matcher + + await ly_func(*tuple(_args), **_kwargs) + + +@on_alconna( + command=Alconna( + "/api", + Args["api", str]["args", MultiVar(AnyStr), ()], + ), + permission=SUPERUSER +).handle() +async def _(result: Arparma, bot: T_Bot, event: T_MessageEvent, matcher: Matcher): + """ + 调用API + Args: + result: + bot: + event: + + Returns: + + """ + api_name = result.main_args.get("api") + args: tuple[str] = result.main_args.get("args", ()) # 类似于url参数,但每个参数间用空格分隔,空格是%20 + args_dict = {} + + for arg in args: + key, value = arg.split("=", 1) + + args_dict[key] = unescape(value.replace("%20", " ")) + + if api_name in need_user_id and "user_id" not in args_dict: + args_dict["user_id"] = str(event.user_id) + if api_name in need_group_id and "group_id" not in args_dict and event.message_type == "group": + args_dict["group_id"] = str(event.group_id) + + if "message" in args_dict: + args_dict["message"] = Message(eval(args_dict["message"])) + + if "messages" in args_dict: + args_dict["messages"] = Message(eval(args_dict["messages"])) + + try: + result = await bot.call_api(api_name, **args_dict) + except Exception as e: + result = str(e) + + args_show = "\n".join("- %s: %s" % (k, v) for k, v in args_dict.items()) + await matcher.finish(f"API: {api_name}\n\nArgs: \n{args_show}\n\nResult: {result}") + + +@driver.on_startup +async def on_startup(): + temp_data = common_db.where_one(TempConfig(), default=TempConfig()) + # 储存重启信息 + if temp_data.data.get("reload", False): + delta_time = time.time() - temp_data.data.get("reload_time", 0) + temp_data.data["delta_time"] = delta_time + common_db.save(temp_data) # 更新数据 + """ + 该部分将迁移至轻雪生命周期 + Returns: + + """ + + +@driver.on_shutdown +async def on_shutdown(): + pass + + +@driver.on_bot_connect +async def _(bot: T_Bot): + temp_data = common_db.where_one(TempConfig(), default=TempConfig()) + if isinstance(bot, satori.Bot): + await satori_utils.user_infos.load_friends(bot) + # 用于重启计时 + if temp_data.data.get("reload", False): + temp_data.data["reload"] = False + reload_bot_id = temp_data.data.get("reload_bot_id", 0) + if reload_bot_id != bot.self_id: + return + reload_session_type = temp_data.data.get("reload_session_type", "private") + reload_session_id = temp_data.data.get("reload_session_id", 0) + delta_time = temp_data.data.get("delta_time", 0) + common_db.save(temp_data) # 更新数据 + + if delta_time <= 20.0: # 启动时间太长就别发了,丢人 + if isinstance(bot, satori.Bot): + await bot.send_message( + channel_id=reload_session_id, + message="Liteyuki reloaded in %.2f s" % delta_time + ) + elif isinstance(bot, onebot.v11.Bot): + await bot.send_msg( + message_type=reload_session_type, + user_id=reload_session_id, + group_id=reload_session_id, + message="Liteyuki reloaded in %.2f s" % delta_time + ) + + elif isinstance(bot, onebot.v12.Bot): + await bot.send_message( + message_type=reload_session_type, + user_id=reload_session_id, + group_id=reload_session_id, + message="Liteyuki reloaded in %.2f s" % delta_time, + detail_type="group" + ) + + +# 每天4点更新 +@scheduler.scheduled_job("cron", hour=4) +async def every_day_update(): + if get_config("auto_update", default=True): + result, logs = update_liteyuki() + pip.main(["install", "-r", "requirements.txt"]) + if result: + await broadcast_to_superusers(f"Liteyuki updated: ```\n{logs}\n```") + 
nonebot.logger.info(f"Liteyuki updated: {logs}") + reload() + else: + nonebot.logger.info(logs) + + +# 需要用户id的api +need_user_id = ( + "send_private_msg", + "send_msg", + "set_group_card", + "set_group_special_title", + "get_stranger_info", + "get_group_member_info" +) + +need_group_id = ( + "send_group_msg", + "send_msg", + "set_group_card", + "set_group_name", + + "set_group_special_title", + "get_group_member_info", + "get_group_member_list", + "get_group_honor_info" +) diff --git a/src/liteyuki_main/loader.py b/src/liteyuki_plugins/nonebot/np_main/loader.py similarity index 75% rename from src/liteyuki_main/loader.py rename to src/liteyuki_plugins/nonebot/np_main/loader.py index 41b6a591..65da7b25 100644 --- a/src/liteyuki_main/loader.py +++ b/src/liteyuki_plugins/nonebot/np_main/loader.py @@ -1,33 +1,39 @@ -import asyncio - -import nonebot.plugin -from nonebot import get_driver -from src.utils import init_log -from src.utils.base.config import get_config -from src.utils.base.data_manager import InstalledPlugin, plugin_db -from src.utils.base.resource import load_resources -from src.utils.message.tools import check_for_package - -load_resources() -init_log() - -driver = get_driver() - - -@driver.on_startup -async def load_plugins(): - nonebot.plugin.load_plugins("src/nonebot_plugins") - # 从数据库读取已安装的插件 - if not get_config("safe_mode", False): - # 安全模式下,不加载插件 - installed_plugins: list[InstalledPlugin] = plugin_db.where_all(InstalledPlugin()) - if installed_plugins: - for installed_plugin in installed_plugins: - if not check_for_package(installed_plugin.module_name): - nonebot.logger.error( - f"{installed_plugin.module_name} not installed, but still in loader index.") - else: - nonebot.load_plugin(installed_plugin.module_name) - nonebot.plugin.load_plugins("plugins") - else: - nonebot.logger.info("Safe mode is on, no plugin loaded.") +import asyncio +import os.path +from pathlib import Path + +import nonebot.plugin +from nonebot import get_driver +from src.utils import init_log +from src.utils.base.config import get_config +from src.utils.base.data_manager import InstalledPlugin, plugin_db +from src.utils.base.resource import load_resources +from src.utils.message.tools import check_for_package + +load_resources() +init_log() + +driver = get_driver() + + +@driver.on_startup +async def load_plugins(): + print("load from", os.path.join(os.path.dirname(__file__), "../nonebot_plugins")) + nonebot.plugin.load_plugins(os.path.abspath(os.path.join(os.path.dirname(__file__), "../nonebot_plugins"))) + # 从数据库读取已安装的插件 + if not get_config("safe_mode", False): + # 安全模式下,不加载插件 + installed_plugins: list[InstalledPlugin] = plugin_db.where_all( + InstalledPlugin() + ) + if installed_plugins: + for installed_plugin in installed_plugins: + if not check_for_package(installed_plugin.module_name): + nonebot.logger.error( + f"{installed_plugin.module_name} not installed, but still in loader index." 
+ ) + else: + nonebot.load_plugin(installed_plugin.module_name) + nonebot.plugin.load_plugins("plugins") + else: + nonebot.logger.info("Safe mode is on, no plugin loaded.") diff --git a/src/liteyuki_main/uitls.py b/src/liteyuki_plugins/nonebot/np_main/uitls.py similarity index 100% rename from src/liteyuki_main/uitls.py rename to src/liteyuki_plugins/nonebot/np_main/uitls.py diff --git a/src/nonebot_plugins/liteyuki_satori_user_info/__init__.py b/src/nonebot_plugins/liteyuki_satori_user_info/__init__.py deleted file mode 100644 index 5960e12d..00000000 --- a/src/nonebot_plugins/liteyuki_satori_user_info/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -from nonebot.plugin import PluginMetadata -from .auto_update import * - -__author__ = "expliyh" -__plugin_meta__ = PluginMetadata( - name="Satori 用户数据自动更新(临时措施)", - description="", - usage="", - type="application", - homepage="https://github.com/snowykami/LiteyukiBot", - extra={ - "liteyuki": True, - "toggleable" : True, - "default_enable" : True, - } -) diff --git a/src/nonebot_plugins/liteyuki_satori_user_info/auto_update.py b/src/nonebot_plugins/liteyuki_satori_user_info/auto_update.py deleted file mode 100644 index d7631647..00000000 --- a/src/nonebot_plugins/liteyuki_satori_user_info/auto_update.py +++ /dev/null @@ -1,20 +0,0 @@ -import nonebot - -from nonebot.message import event_preprocessor -from src.utils.base.ly_typing import T_MessageEvent -from src.utils import satori_utils -from nonebot.adapters import satori -from nonebot_plugin_alconna.typings import Event -from src.nonebot_plugins.liteyuki_status.counter_for_satori import satori_counter - - -@event_preprocessor -async def pre_handle(event: Event): - if isinstance(event, satori.MessageEvent): - if event.user.id == event.self_id: - satori_counter.msg_sent += 1 - else: - satori_counter.msg_received += 1 - if event.user.name is not None: - if await satori_utils.user_infos.put(event.user): - nonebot.logger.info(f"Satori user {event.user.name}<{event.user.id}> updated") diff --git a/src/utils/__init__.py b/src/utils/__init__.py index 50886ac1..5f0237f5 100644 --- a/src/utils/__init__.py +++ b/src/utils/__init__.py @@ -1,42 +1,42 @@ -import sys - -import nonebot - -__NAME__ = "LiteyukiBot" -__VERSION__ = "6.3.2" # 60201 - -from src.utils.base.config import load_from_yaml, config -from src.utils.base.log import init_log -from git import Repo - -major, minor, patch = map(int, __VERSION__.split(".")) -__VERSION_I__ = major * 10000 + minor * 100 + patch - - -def init(): - """ - 初始化 - Returns: - - """ - # 检测python版本是否高于3.10 - init_log() - if sys.version_info < (3, 10): - nonebot.logger.error("Requires Python3.10+ to run, please upgrade your Python Environment.") - exit(1) - - try: - # 检测git仓库 - repo = Repo(".") - except Exception as e: - nonebot.logger.error(f"Failed to load git repository: {e}, please clone this project from GitHub instead of downloading the zip file.") - - # temp_data: TempConfig = common_db.where_one(TempConfig(), default=TempConfig()) - # temp_data.data["start_time"] = time.time() - # common_db.save(temp_data) - - nonebot.logger.info( - f"Run Liteyuki-NoneBot with Python{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro} " - f"at {sys.executable}" - ) - nonebot.logger.info(f"{__NAME__} {__VERSION__}({__VERSION_I__}) is running") +import sys + +import nonebot + +__NAME__ = "LiteyukiBot" +__VERSION__ = "6.3.2" # 60201 + +from src.utils.base.config import load_from_yaml, config +from src.utils.base.log import init_log +from git import Repo + +major, 
minor, patch = map(int, __VERSION__.split(".")) +__VERSION_I__ = major * 10000 + minor * 100 + patch + + +def init(): + """ + 初始化 + Returns: + + """ + # 检测python版本是否高于3.10 + init_log() + if sys.version_info < (3, 10): + nonebot.logger.error("Requires Python3.10+ to run, please upgrade your Python Environment.") + exit(1) + + try: + # 检测git仓库 + repo = Repo(".") + except Exception as e: + nonebot.logger.error(f"Failed to load git repository: {e}, please clone this project from GitHub instead of downloading the zip file.") + + # temp_data: TempConfig = common_db.where_one(TempConfig(), default=TempConfig()) + # temp_data.data["start_time"] = time.time() + # common_db.save(temp_data) + + nonebot.logger.info( + f"Run Liteyuki-NoneBot with Python{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro} " + f"at {sys.executable}" + ) + nonebot.logger.info(f"{__NAME__} {__VERSION__}({__VERSION_I__}) is running") diff --git a/src/utils/base/config.py b/src/utils/base/config.py index d518f0f8..08a0d41b 100644 --- a/src/utils/base/config.py +++ b/src/utils/base/config.py @@ -1,109 +1,109 @@ -import os -import platform -from typing import List - -import nonebot -import yaml -from pydantic import BaseModel - -from ..message.tools import random_hex_string - - -config = {} # 全局配置,确保加载后读取 - - -class SatoriNodeConfig(BaseModel): - host: str = "" - port: str = "5500" - path: str = "" - token: str = "" - - -class SatoriConfig(BaseModel): - comment: str = ( - "These features are still in development. Do not enable in production environment." - ) - enable: bool = False - hosts: List[SatoriNodeConfig] = [SatoriNodeConfig()] - - -class BasicConfig(BaseModel): - host: str = "127.0.0.1" - port: int = 20216 - superusers: list[str] = [] - command_start: list[str] = ["/", ""] - nickname: list[str] = [f"LiteyukiBot-{random_hex_string(6)}"] - satori: SatoriConfig = SatoriConfig() - data_path: str = "data/liteyuki" - chromium_path: str = ( - "/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome" # type: ignore - if platform.system() == "Darwin" - else ( - "C:/Program Files (x86)/Microsoft/Edge/Application/msedge.exe" - if platform.system() == "Windows" - else "/usr/bin/chromium-browser" - ) - ) - - -def load_from_yaml(file_: str) -> dict: - global config - nonebot.logger.debug("Loading config from %s" % file_) - if not os.path.exists(file_): - nonebot.logger.warning( - f"Config file {file_} not found, created default config, please modify it and restart" - ) - with open(file_, "w", encoding="utf-8") as f: - yaml.dump(BasicConfig().dict(), f, default_flow_style=False) - - with open(file_, "r", encoding="utf-8") as f: - conf = init_conf(yaml.load(f, Loader=yaml.FullLoader)) - config = conf - if conf is None: - nonebot.logger.warning( - f"Config file {file_} is empty, use default config. 
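# Worked example of the __VERSION_I__ encoding used in src/utils/__init__.py: each
# version segment occupies two decimal digits, so "6.3.2" encodes to 60302 (a value
# of 60201 would correspond to version 6.2.1).
major, minor, patch = map(int, "6.3.2".split("."))
assert major * 10000 + minor * 100 + patch == 60302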
please modify it and restart" - ) - conf = BasicConfig().dict() - return conf - - -def get_config(key: str, default=None): - """获取配置项,优先级:bot > config > db > yaml""" - try: - bot = nonebot.get_bot() - except: - bot = None - - if bot is None: - bot_config = {} - else: - bot_config = bot.config.dict() - - if key in bot_config: - return bot_config[key] - - elif key in config: - return config[key] - - elif key in load_from_yaml("config.yml"): - return load_from_yaml("config.yml")[key] - - else: - return default - - -def init_conf(conf: dict) -> dict: - """ - 初始化配置文件,确保配置文件中的必要字段存在,且不会冲突 - Args: - conf: - - Returns: - - """ - # 若command_start中无"",则添加必要命令头,开启alconna_use_command_start防止冲突 - # 以下内容由于issue #53 被注释 - # if "" not in conf.get("command_start", []): - # conf["alconna_use_command_start"] = True - return conf - pass +import os +import platform +from typing import List + +import nonebot +import yaml +from pydantic import BaseModel + +from ..message.tools import random_hex_string + + +config = {} # 全局配置,确保加载后读取 + + +class SatoriNodeConfig(BaseModel): + host: str = "" + port: str = "5500" + path: str = "" + token: str = "" + + +class SatoriConfig(BaseModel): + comment: str = ( + "These features are still in development. Do not enable in production environment." + ) + enable: bool = False + hosts: List[SatoriNodeConfig] = [SatoriNodeConfig()] + + +class BasicConfig(BaseModel): + host: str = "127.0.0.1" + port: int = 20216 + superusers: list[str] = [] + command_start: list[str] = ["/", ""] + nickname: list[str] = [f"LiteyukiBot-{random_hex_string(6)}"] + satori: SatoriConfig = SatoriConfig() + data_path: str = "data/liteyuki" + chromium_path: str = ( + "/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome" # type: ignore + if platform.system() == "Darwin" + else ( + "C:/Program Files (x86)/Microsoft/Edge/Application/msedge.exe" + if platform.system() == "Windows" + else "/usr/bin/chromium-browser" + ) + ) + + +def load_from_yaml(file_: str) -> dict: + global config + nonebot.logger.debug("Loading config from %s" % file_) + if not os.path.exists(file_): + nonebot.logger.warning( + f"Config file {file_} not found, created default config, please modify it and restart" + ) + with open(file_, "w", encoding="utf-8") as f: + yaml.dump(BasicConfig().dict(), f, default_flow_style=False) + + with open(file_, "r", encoding="utf-8") as f: + conf = init_conf(yaml.load(f, Loader=yaml.FullLoader)) + config = conf + if conf is None: + nonebot.logger.warning( + f"Config file {file_} is empty, use default config. 
please modify it and restart" + ) + conf = BasicConfig().dict() + return conf + + +def get_config(key: str, default=None): + """获取配置项,优先级:bot > config > db > yaml""" + try: + bot = nonebot.get_bot() + except: + bot = None + + if bot is None: + bot_config = {} + else: + bot_config = bot.config.dict() + + if key in bot_config: + return bot_config[key] + + elif key in config: + return config[key] + + elif key in load_from_yaml("config.yml"): + return load_from_yaml("config.yml")[key] + + else: + return default + + +def init_conf(conf: dict) -> dict: + """ + 初始化配置文件,确保配置文件中的必要字段存在,且不会冲突 + Args: + conf: + + Returns: + + """ + # 若command_start中无"",则添加必要命令头,开启alconna_use_command_start防止冲突 + # 以下内容由于issue #53 被注释 + # if "" not in conf.get("command_start", []): + # conf["alconna_use_command_start"] = True + return conf + pass diff --git a/src/utils/base/data.py b/src/utils/base/data.py index 894ce54f..3b8f0ed1 100644 --- a/src/utils/base/data.py +++ b/src/utils/base/data.py @@ -1,436 +1,436 @@ -import inspect -import os -import pickle -import sqlite3 -from types import NoneType -from typing import Any, Callable - -from nonebot import logger -from nonebot.compat import PYDANTIC_V2 -from pydantic import BaseModel - - -class LiteModel(BaseModel): - TABLE_NAME: str = None - id: int = None - - def dump(self, *args, **kwargs): - if PYDANTIC_V2: - return self.model_dump(*args, **kwargs) - else: - return self.dict(*args, **kwargs) - - -class Database: - def __init__(self, db_name: str): - - if os.path.dirname(db_name) != "" and not os.path.exists(os.path.dirname(db_name)): - os.makedirs(os.path.dirname(db_name)) - - self.db_name = db_name - self.conn = sqlite3.connect(db_name, check_same_thread=False) - self.cursor = self.conn.cursor() - - self._on_save_callbacks = [] - self._is_locked = False - - def lock(self): - self.cursor.execute("BEGIN TRANSACTION") - self._is_locked = True - - def lock_query(self, query: str, *args): - """锁定查询""" - self.cursor.execute(query, args).fetchall() - - def lock_model(self, model: LiteModel) -> LiteModel | Any | None: - """锁定行 - Args: - model: 数据模型实例 - - - Returns: - - """ - pass - - def unlock(self): - self.cursor.execute("COMMIT") - self._is_locked = False - - def where_one(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> LiteModel | Any | None: - """查询第一个 - Args: - model: 数据模型实例 - condition: 查询条件,不给定则查询所有 - *args: 参数化查询参数 - default: 默认值 - - Returns: - - """ - all_results = self.where_all(model, condition, *args) - return all_results[0] if all_results else default - - def where_all(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> list[LiteModel | Any] | None: - """查询所有 - Args: - model: 数据模型实例 - condition: 查询条件,不给定则查询所有 - *args: 参数化查询参数 - default: 默认值 - - Returns: - - """ - table_name = model.TABLE_NAME - model_type = type(model) - logger.debug(f"Selecting {model.TABLE_NAME} WHERE {condition.replace('?', '%s') % args}") - if not table_name: - raise ValueError(f"数据模型{model_type.__name__}未提供表名") - - # condition = f"WHERE {condition}" - # print(f"SELECT * FROM {table_name} {condition}", args) - # if len(args) == 0: - # results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}").fetchall() - # else: - # results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}", args).fetchall() - if condition: - results = self.cursor.execute(f"SELECT * FROM {table_name} WHERE {condition}", args).fetchall() - else: - results = self.cursor.execute(f"SELECT * FROM {table_name}").fetchall() - fields = 
[description[0] for description in self.cursor.description] - if not results: - return default - else: - return [model_type(**self._load(dict(zip(fields, result)))) for result in results] - - def save(self, *args: LiteModel): - self.returns_ = """增/改操作 - Args: - *args: - Returns: - """ - table_list = [item[0] for item in self.cursor.execute("SELECT name FROM sqlite_master WHERE type ='table'").fetchall()] - for model in args: - logger.debug(f"Upserting {model}") - if not model.TABLE_NAME: - raise ValueError(f"数据模型 {model.__class__.__name__} 未提供表名") - elif model.TABLE_NAME not in table_list: - raise ValueError(f"数据模型 {model.__class__.__name__} 表 {model.TABLE_NAME} 不存在,请先迁移") - else: - self._save(model.dump(by_alias=True)) - - for callback in self._on_save_callbacks: - callback(model) - - def _save(self, obj: Any) -> Any: - # obj = copy.deepcopy(obj) - if isinstance(obj, dict): - table_name = obj.get("TABLE_NAME") - row_id = obj.get("id") - new_obj = {} - for field, value in obj.items(): - if isinstance(value, self.ITERABLE_TYPE): - new_obj[self._get_stored_field_prefix(value) + field] = self._save(value) # self._save(value) # -> bytes - elif isinstance(value, self.BASIC_TYPE): - new_obj[field] = value - else: - raise ValueError(f"数据模型{table_name}包含不支持的数据类型,字段:{field} 值:{value} 值类型:{type(value)}") - if table_name: - fields, values = [], [] - for n_field, n_value in new_obj.items(): - if n_field not in ["TABLE_NAME", "id"]: - fields.append(n_field) - values.append(n_value) - # 移除TABLE_NAME和id - fields = list(fields) - values = list(values) - if row_id is not None: - # 如果 _id 不为空,将 'id' 插入到字段列表的开始 - fields.insert(0, 'id') - # 将 _id 插入到值列表的开始 - values.insert(0, row_id) - fields = ', '.join([f'"{field}"' for field in fields]) - placeholders = ', '.join('?' 
for _ in values) - self.cursor.execute(f"INSERT OR REPLACE INTO {table_name}({fields}) VALUES ({placeholders})", tuple(values)) - # self.conn.commit() - if self._is_locked: - pass - else: - self.conn.commit() - foreign_id = self.cursor.execute("SELECT last_insert_rowid()").fetchone()[0] - return f"{self.FOREIGN_KEY_PREFIX}{foreign_id}@{table_name}" # -> FOREIGN_KEY_123456@{table_name} id@{table_name} - else: - return pickle.dumps(new_obj) # -> bytes - elif isinstance(obj, (list, set, tuple)): - obj_type = type(obj) # 到时候转回去 - new_obj = [] - for item in obj: - if isinstance(item, self.ITERABLE_TYPE): - new_obj.append(self._save(item)) - elif isinstance(item, self.BASIC_TYPE): - new_obj.append(item) - else: - raise ValueError(f"数据模型包含不支持的数据类型,值:{item} 值类型:{type(item)}") - return pickle.dumps(obj_type(new_obj)) # -> bytes - else: - raise ValueError(f"数据模型包含不支持的数据类型,值:{obj} 值类型:{type(obj)}") - - def _load(self, obj: Any) -> Any: - - if isinstance(obj, dict): - - new_obj = {} - - for field, value in obj.items(): - - field: str - - if field.startswith(self.BYTES_PREFIX): - if isinstance(value, bytes): - new_obj[field.replace(self.BYTES_PREFIX, "")] = self._load(pickle.loads(value)) - else: # 从value字段可能为None,fix at 2024/6/13 - pass - # 暂时不作处理,后面再修 - - elif field.startswith(self.FOREIGN_KEY_PREFIX): - - new_obj[field.replace(self.FOREIGN_KEY_PREFIX, "")] = self._load(self._get_foreign_data(value)) - - else: - new_obj[field] = value - return new_obj - elif isinstance(obj, (list, set, tuple)): - - new_obj = [] - for item in obj: - - if isinstance(item, bytes): - - # 对bytes进行尝试解析,解析失败则返回原始bytes - try: - new_obj.append(self._load(pickle.loads(item))) - except Exception as e: - new_obj.append(self._load(item)) - - elif isinstance(item, str) and item.startswith(self.FOREIGN_KEY_PREFIX): - new_obj.append(self._load(self._get_foreign_data(item))) - else: - new_obj.append(self._load(item)) - return new_obj - else: - return obj - - def delete(self, model: LiteModel, condition: str, *args: Any, allow_empty: bool = False): - """ - 删除满足条件的数据 - Args: - allow_empty: 允许空条件删除整个表 - model: - condition: - *args: - - Returns: - - """ - table_name = model.TABLE_NAME - logger.debug(f"Deleting {model} WHERE {condition} {args}") - if not table_name: - raise ValueError(f"数据模型{model.__class__.__name__}未提供表名") - if model.id is not None: - condition = f"id = {model.id}" - if not condition and not allow_empty: - raise ValueError("删除操作必须提供条件") - self.cursor.execute(f"DELETE FROM {table_name} WHERE {condition}", args) - if self._is_locked: - pass - else: - self.conn.commit() - - def auto_migrate(self, *args: LiteModel): - - """ - 自动迁移模型 - Args: - *args: 模型类实例化对象,支持空默认值,不支持嵌套迁移 - - Returns: - - """ - for model in args: - if not model.TABLE_NAME: - raise ValueError(f"数据模型{type(model).__name__}未提供表名") - - # 若无则创建表 - self.cursor.execute( - f'CREATE TABLE IF NOT EXISTS "{model.TABLE_NAME}" (id INTEGER PRIMARY KEY AUTOINCREMENT)' - ) - - # 获取表结构,field -> SqliteType - new_structure = {} - for n_field, n_value in model.dump(by_alias=True).items(): - if n_field not in ["TABLE_NAME", "id"]: - new_structure[self._get_stored_field_prefix(n_value) + n_field] = self._get_stored_type(n_value) - - # 原有的字段列表 - existing_structure = dict([(column[1], column[2]) for column in self.cursor.execute(f'PRAGMA table_info({model.TABLE_NAME})').fetchall()]) - # 检测缺失字段,由于SQLite是动态类型,所以不需要检测类型 - for n_field, n_type in new_structure.items(): - if n_field not in existing_structure.keys() and n_field.lower() not in ["id", "table_name"]: - default_value = 
self.DEFAULT_MAPPING.get(n_type, 'NULL') - self.cursor.execute( - f"ALTER TABLE '{model.TABLE_NAME}' ADD COLUMN {n_field} {n_type} DEFAULT {self.DEFAULT_MAPPING.get(n_type, default_value)}" - ) - - # 检测多余字段进行删除 - for e_field in existing_structure.keys(): - if e_field not in new_structure.keys() and e_field.lower() not in ['id']: - self.cursor.execute( - f'ALTER TABLE "{model.TABLE_NAME}" DROP COLUMN "{e_field}"' - ) - self.conn.commit() - # 已完成 - - def _get_stored_field_prefix(self, value) -> str: - """根据类型获取存储字段前缀,一定在后加上字段名 - * -> "" - Args: - value: 储存的值 - - Returns: - Sqlite3存储字段 - """ - - if isinstance(value, LiteModel) or isinstance(value, dict) and "TABLE_NAME" in value: - return self.FOREIGN_KEY_PREFIX - elif type(value) in self.ITERABLE_TYPE: - return self.BYTES_PREFIX - return "" - - def _get_stored_type(self, value) -> str: - """获取存储类型 - - Args: - value: 储存的值 - - Returns: - Sqlite3存储类型 - """ - if isinstance(value, dict) and "TABLE_NAME" in value: - # 是一个模型字典,储存外键 - return "INTEGER" - return self.TYPE_MAPPING.get(type(value), "TEXT") - - def _get_foreign_data(self, foreign_value: str) -> dict: - """ - 获取外键数据 - Args: - foreign_value: - - Returns: - - """ - foreign_value = foreign_value.replace(self.FOREIGN_KEY_PREFIX, "") - table_name = foreign_value.split("@")[-1] - foreign_id = foreign_value.split("@")[0] - fields = [description[1] for description in self.cursor.execute(f"PRAGMA table_info({table_name})").fetchall()] - result = self.cursor.execute(f"SELECT * FROM {table_name} WHERE id = ?", (foreign_id,)).fetchone() - return dict(zip(fields, result)) - - def on_save(self, func: Callable[[LiteModel | Any], None]): - """ - 装饰一个可调用对象使其在储存数据模型时被调用 - Args: - func: - Returns: - """ - - def wrapper(model): - # 检查被装饰函数声明的model类型和传入的model类型是否一致 - sign = inspect.signature(func) - if param := sign.parameters.get("model"): - if isinstance(model, param.annotation): - pass - else: - return - else: - return - result = func(model) - for callback in self._on_save_callbacks: - callback(result) - return result - - self._on_save_callbacks.append(wrapper) - return wrapper - - TYPE_MAPPING = { - int : "INTEGER", - float : "REAL", - str : "TEXT", - bool : "INTEGER", - bytes : "BLOB", - NoneType : "NULL", - # dict : "TEXT", - # list : "TEXT", - # tuple : "TEXT", - # set : "TEXT", - - dict : "BLOB", # LITEYUKIDICT{key_name} - list : "BLOB", # LITEYUKILIST{key_name} - tuple : "BLOB", # LITEYUKITUPLE{key_name} - set : "BLOB", # LITEYUKISET{key_name} - LiteModel: "TEXT" # FOREIGN_KEY_{table_name} - } - DEFAULT_MAPPING = { - "TEXT" : "''", - "INTEGER": 0, - "REAL" : 0.0, - "BLOB" : None, - "NULL" : None - } - - # 基础类型 - BASIC_TYPE = (int, float, str, bool, bytes, NoneType) - # 可序列化类型 - ITERABLE_TYPE = (dict, list, tuple, set, LiteModel) - - # 外键前缀 - FOREIGN_KEY_PREFIX = "FOREIGN_KEY_" - # 转换为的字节前缀 - BYTES_PREFIX = "PICKLE_BYTES_" - - # transaction tx 事务操作 - def first(self, model: LiteModel) -> "Database": - pass - - def where(self, condition: str, *args) -> "Database": - pass - - def limit(self, limit: int) -> "Database": - pass - - def order(self, order: str) -> "Database": - pass - - -def check_sqlite_keyword(name): - sqlite_keywords = [ - "ABORT", "ACTION", "ADD", "AFTER", "ALL", "ALTER", "ANALYZE", "AND", "AS", "ASC", - "ATTACH", "AUTOINCREMENT", "BEFORE", "BEGIN", "BETWEEN", "BY", "CASCADE", "CASE", - "CAST", "CHECK", "COLLATE", "COLUMN", "COMMIT", "CONFLICT", "CONSTRAINT", "CREATE", - "CROSS", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DATABASE", "DEFAULT", - "DEFERRABLE", "DEFERRED", 
"DELETE", "DESC", "DETACH", "DISTINCT", "DROP", "EACH", - "ELSE", "END", "ESCAPE", "EXCEPT", "EXCLUSIVE", "EXISTS", "EXPLAIN", "FAIL", "FOR", - "FOREIGN", "FROM", "FULL", "GLOB", "GROUP", "HAVING", "IF", "IGNORE", "IMMEDIATE", - "IN", "INDEX", "INDEXED", "INITIALLY", "INNER", "INSERT", "INSTEAD", "INTERSECT", - "INTO", "IS", "ISNULL", "JOIN", "KEY", "LEFT", "LIKE", "LIMIT", "MATCH", "NATURAL", - "NO", "NOT", "NOTNULL", "NULL", "OF", "OFFSET", "ON", "OR", "ORDER", "OUTER", "PLAN", - "PRAGMA", "PRIMARY", "QUERY", "RAISE", "RECURSIVE", "REFERENCES", "REGEXP", "REINDEX", - "RELEASE", "RENAME", "REPLACE", "RESTRICT", "RIGHT", "ROLLBACK", "ROW", "SAVEPOINT", - "SELECT", "SET", "TABLE", "TEMP", "TEMPORARY", "THEN", "TO", "TRANSACTION", "TRIGGER", - "UNION", "UNIQUE", "UPDATE", "USING", "VACUUM", "VALUES", "VIEW", "VIRTUAL", "WHEN", - "WHERE", "WITH", "WITHOUT" - ] - return True - # if name.upper() in sqlite_keywords: +import inspect +import os +import pickle +import sqlite3 +from types import NoneType +from typing import Any, Callable + +from nonebot import logger +from nonebot.compat import PYDANTIC_V2 +from pydantic import BaseModel + + +class LiteModel(BaseModel): + TABLE_NAME: str = None + id: int = None + + def dump(self, *args, **kwargs): + if PYDANTIC_V2: + return self.model_dump(*args, **kwargs) + else: + return self.dict(*args, **kwargs) + + +class Database: + def __init__(self, db_name: str): + + if os.path.dirname(db_name) != "" and not os.path.exists(os.path.dirname(db_name)): + os.makedirs(os.path.dirname(db_name)) + + self.db_name = db_name + self.conn = sqlite3.connect(db_name, check_same_thread=False) + self.cursor = self.conn.cursor() + + self._on_save_callbacks = [] + self._is_locked = False + + def lock(self): + self.cursor.execute("BEGIN TRANSACTION") + self._is_locked = True + + def lock_query(self, query: str, *args): + """锁定查询""" + self.cursor.execute(query, args).fetchall() + + def lock_model(self, model: LiteModel) -> LiteModel | Any | None: + """锁定行 + Args: + model: 数据模型实例 + + + Returns: + + """ + pass + + def unlock(self): + self.cursor.execute("COMMIT") + self._is_locked = False + + def where_one(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> LiteModel | Any | None: + """查询第一个 + Args: + model: 数据模型实例 + condition: 查询条件,不给定则查询所有 + *args: 参数化查询参数 + default: 默认值 + + Returns: + + """ + all_results = self.where_all(model, condition, *args) + return all_results[0] if all_results else default + + def where_all(self, model: LiteModel, condition: str = "", *args: Any, default: Any = None) -> list[LiteModel | Any] | None: + """查询所有 + Args: + model: 数据模型实例 + condition: 查询条件,不给定则查询所有 + *args: 参数化查询参数 + default: 默认值 + + Returns: + + """ + table_name = model.TABLE_NAME + model_type = type(model) + logger.debug(f"Selecting {model.TABLE_NAME} WHERE {condition.replace('?', '%s') % args}") + if not table_name: + raise ValueError(f"数据模型{model_type.__name__}未提供表名") + + # condition = f"WHERE {condition}" + # print(f"SELECT * FROM {table_name} {condition}", args) + # if len(args) == 0: + # results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}").fetchall() + # else: + # results = self.cursor.execute(f"SELECT * FROM {table_name} {condition}", args).fetchall() + if condition: + results = self.cursor.execute(f"SELECT * FROM {table_name} WHERE {condition}", args).fetchall() + else: + results = self.cursor.execute(f"SELECT * FROM {table_name}").fetchall() + fields = [description[0] for description in self.cursor.description] + if not results: + 
return default + else: + return [model_type(**self._load(dict(zip(fields, result)))) for result in results] + + def save(self, *args: LiteModel): + self.returns_ = """增/改操作 + Args: + *args: + Returns: + """ + table_list = [item[0] for item in self.cursor.execute("SELECT name FROM sqlite_master WHERE type ='table'").fetchall()] + for model in args: + logger.debug(f"Upserting {model}") + if not model.TABLE_NAME: + raise ValueError(f"数据模型 {model.__class__.__name__} 未提供表名") + elif model.TABLE_NAME not in table_list: + raise ValueError(f"数据模型 {model.__class__.__name__} 表 {model.TABLE_NAME} 不存在,请先迁移") + else: + self._save(model.dump(by_alias=True)) + + for callback in self._on_save_callbacks: + callback(model) + + def _save(self, obj: Any) -> Any: + # obj = copy.deepcopy(obj) + if isinstance(obj, dict): + table_name = obj.get("TABLE_NAME") + row_id = obj.get("id") + new_obj = {} + for field, value in obj.items(): + if isinstance(value, self.ITERABLE_TYPE): + new_obj[self._get_stored_field_prefix(value) + field] = self._save(value) # self._save(value) # -> bytes + elif isinstance(value, self.BASIC_TYPE): + new_obj[field] = value + else: + raise ValueError(f"数据模型{table_name}包含不支持的数据类型,字段:{field} 值:{value} 值类型:{type(value)}") + if table_name: + fields, values = [], [] + for n_field, n_value in new_obj.items(): + if n_field not in ["TABLE_NAME", "id"]: + fields.append(n_field) + values.append(n_value) + # 移除TABLE_NAME和id + fields = list(fields) + values = list(values) + if row_id is not None: + # 如果 _id 不为空,将 'id' 插入到字段列表的开始 + fields.insert(0, 'id') + # 将 _id 插入到值列表的开始 + values.insert(0, row_id) + fields = ', '.join([f'"{field}"' for field in fields]) + placeholders = ', '.join('?' for _ in values) + self.cursor.execute(f"INSERT OR REPLACE INTO {table_name}({fields}) VALUES ({placeholders})", tuple(values)) + # self.conn.commit() + if self._is_locked: + pass + else: + self.conn.commit() + foreign_id = self.cursor.execute("SELECT last_insert_rowid()").fetchone()[0] + return f"{self.FOREIGN_KEY_PREFIX}{foreign_id}@{table_name}" # -> FOREIGN_KEY_123456@{table_name} id@{table_name} + else: + return pickle.dumps(new_obj) # -> bytes + elif isinstance(obj, (list, set, tuple)): + obj_type = type(obj) # 到时候转回去 + new_obj = [] + for item in obj: + if isinstance(item, self.ITERABLE_TYPE): + new_obj.append(self._save(item)) + elif isinstance(item, self.BASIC_TYPE): + new_obj.append(item) + else: + raise ValueError(f"数据模型包含不支持的数据类型,值:{item} 值类型:{type(item)}") + return pickle.dumps(obj_type(new_obj)) # -> bytes + else: + raise ValueError(f"数据模型包含不支持的数据类型,值:{obj} 值类型:{type(obj)}") + + def _load(self, obj: Any) -> Any: + + if isinstance(obj, dict): + + new_obj = {} + + for field, value in obj.items(): + + field: str + + if field.startswith(self.BYTES_PREFIX): + if isinstance(value, bytes): + new_obj[field.replace(self.BYTES_PREFIX, "")] = self._load(pickle.loads(value)) + else: # 从value字段可能为None,fix at 2024/6/13 + pass + # 暂时不作处理,后面再修 + + elif field.startswith(self.FOREIGN_KEY_PREFIX): + + new_obj[field.replace(self.FOREIGN_KEY_PREFIX, "")] = self._load(self._get_foreign_data(value)) + + else: + new_obj[field] = value + return new_obj + elif isinstance(obj, (list, set, tuple)): + + new_obj = [] + for item in obj: + + if isinstance(item, bytes): + + # 对bytes进行尝试解析,解析失败则返回原始bytes + try: + new_obj.append(self._load(pickle.loads(item))) + except Exception as e: + new_obj.append(self._load(item)) + + elif isinstance(item, str) and item.startswith(self.FOREIGN_KEY_PREFIX): + 
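# A minimal sketch of the column naming produced by Database._save in
# src/utils/base/data.py; Tag and Post are hypothetical models used only to show the
# PICKLE_BYTES_ / FOREIGN_KEY_ prefixes:
#   title  -> plain column "title"
#   labels -> BLOB column "PICKLE_BYTES_labels" holding pickle.dumps([...])
#   tag    -> TEXT column "FOREIGN_KEY_tag" holding "FOREIGN_KEY_<rowid>@tag"
# Database._load reverses the mapping and strips the prefixes when rows are read back.
from src.utils.base.data import Database, LiteModel

class Tag(LiteModel):
    TABLE_NAME: str = "tag"
    name: str = ""

class Post(LiteModel):
    TABLE_NAME: str = "post"
    title: str = ""
    labels: list = []
    tag: Tag = Tag()

db = Database("data/liteyuki/example.ldb")
db.auto_migrate(Post(), Tag())   # nested models are migrated separately (nested migration is unsupported)
db.save(Post(title="hello", labels=["a", "b"], tag=Tag(name="demo")))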
new_obj.append(self._load(self._get_foreign_data(item))) + else: + new_obj.append(self._load(item)) + return new_obj + else: + return obj + + def delete(self, model: LiteModel, condition: str, *args: Any, allow_empty: bool = False): + """ + 删除满足条件的数据 + Args: + allow_empty: 允许空条件删除整个表 + model: + condition: + *args: + + Returns: + + """ + table_name = model.TABLE_NAME + logger.debug(f"Deleting {model} WHERE {condition} {args}") + if not table_name: + raise ValueError(f"数据模型{model.__class__.__name__}未提供表名") + if model.id is not None: + condition = f"id = {model.id}" + if not condition and not allow_empty: + raise ValueError("删除操作必须提供条件") + self.cursor.execute(f"DELETE FROM {table_name} WHERE {condition}", args) + if self._is_locked: + pass + else: + self.conn.commit() + + def auto_migrate(self, *args: LiteModel): + + """ + 自动迁移模型 + Args: + *args: 模型类实例化对象,支持空默认值,不支持嵌套迁移 + + Returns: + + """ + for model in args: + if not model.TABLE_NAME: + raise ValueError(f"数据模型{type(model).__name__}未提供表名") + + # 若无则创建表 + self.cursor.execute( + f'CREATE TABLE IF NOT EXISTS "{model.TABLE_NAME}" (id INTEGER PRIMARY KEY AUTOINCREMENT)' + ) + + # 获取表结构,field -> SqliteType + new_structure = {} + for n_field, n_value in model.dump(by_alias=True).items(): + if n_field not in ["TABLE_NAME", "id"]: + new_structure[self._get_stored_field_prefix(n_value) + n_field] = self._get_stored_type(n_value) + + # 原有的字段列表 + existing_structure = dict([(column[1], column[2]) for column in self.cursor.execute(f'PRAGMA table_info({model.TABLE_NAME})').fetchall()]) + # 检测缺失字段,由于SQLite是动态类型,所以不需要检测类型 + for n_field, n_type in new_structure.items(): + if n_field not in existing_structure.keys() and n_field.lower() not in ["id", "table_name"]: + default_value = self.DEFAULT_MAPPING.get(n_type, 'NULL') + self.cursor.execute( + f"ALTER TABLE '{model.TABLE_NAME}' ADD COLUMN {n_field} {n_type} DEFAULT {self.DEFAULT_MAPPING.get(n_type, default_value)}" + ) + + # 检测多余字段进行删除 + for e_field in existing_structure.keys(): + if e_field not in new_structure.keys() and e_field.lower() not in ['id']: + self.cursor.execute( + f'ALTER TABLE "{model.TABLE_NAME}" DROP COLUMN "{e_field}"' + ) + self.conn.commit() + # 已完成 + + def _get_stored_field_prefix(self, value) -> str: + """根据类型获取存储字段前缀,一定在后加上字段名 + * -> "" + Args: + value: 储存的值 + + Returns: + Sqlite3存储字段 + """ + + if isinstance(value, LiteModel) or isinstance(value, dict) and "TABLE_NAME" in value: + return self.FOREIGN_KEY_PREFIX + elif type(value) in self.ITERABLE_TYPE: + return self.BYTES_PREFIX + return "" + + def _get_stored_type(self, value) -> str: + """获取存储类型 + + Args: + value: 储存的值 + + Returns: + Sqlite3存储类型 + """ + if isinstance(value, dict) and "TABLE_NAME" in value: + # 是一个模型字典,储存外键 + return "INTEGER" + return self.TYPE_MAPPING.get(type(value), "TEXT") + + def _get_foreign_data(self, foreign_value: str) -> dict: + """ + 获取外键数据 + Args: + foreign_value: + + Returns: + + """ + foreign_value = foreign_value.replace(self.FOREIGN_KEY_PREFIX, "") + table_name = foreign_value.split("@")[-1] + foreign_id = foreign_value.split("@")[0] + fields = [description[1] for description in self.cursor.execute(f"PRAGMA table_info({table_name})").fetchall()] + result = self.cursor.execute(f"SELECT * FROM {table_name} WHERE id = ?", (foreign_id,)).fetchone() + return dict(zip(fields, result)) + + def on_save(self, func: Callable[[LiteModel | Any], None]): + """ + 装饰一个可调用对象使其在储存数据模型时被调用 + Args: + func: + Returns: + """ + + def wrapper(model): + # 检查被装饰函数声明的model类型和传入的model类型是否一致 + sign = inspect.signature(func) + 
if param := sign.parameters.get("model"): + if isinstance(model, param.annotation): + pass + else: + return + else: + return + result = func(model) + for callback in self._on_save_callbacks: + callback(result) + return result + + self._on_save_callbacks.append(wrapper) + return wrapper + + TYPE_MAPPING = { + int : "INTEGER", + float : "REAL", + str : "TEXT", + bool : "INTEGER", + bytes : "BLOB", + NoneType : "NULL", + # dict : "TEXT", + # list : "TEXT", + # tuple : "TEXT", + # set : "TEXT", + + dict : "BLOB", # LITEYUKIDICT{key_name} + list : "BLOB", # LITEYUKILIST{key_name} + tuple : "BLOB", # LITEYUKITUPLE{key_name} + set : "BLOB", # LITEYUKISET{key_name} + LiteModel: "TEXT" # FOREIGN_KEY_{table_name} + } + DEFAULT_MAPPING = { + "TEXT" : "''", + "INTEGER": 0, + "REAL" : 0.0, + "BLOB" : None, + "NULL" : None + } + + # 基础类型 + BASIC_TYPE = (int, float, str, bool, bytes, NoneType) + # 可序列化类型 + ITERABLE_TYPE = (dict, list, tuple, set, LiteModel) + + # 外键前缀 + FOREIGN_KEY_PREFIX = "FOREIGN_KEY_" + # 转换为的字节前缀 + BYTES_PREFIX = "PICKLE_BYTES_" + + # transaction tx 事务操作 + def first(self, model: LiteModel) -> "Database": + pass + + def where(self, condition: str, *args) -> "Database": + pass + + def limit(self, limit: int) -> "Database": + pass + + def order(self, order: str) -> "Database": + pass + + +def check_sqlite_keyword(name): + sqlite_keywords = [ + "ABORT", "ACTION", "ADD", "AFTER", "ALL", "ALTER", "ANALYZE", "AND", "AS", "ASC", + "ATTACH", "AUTOINCREMENT", "BEFORE", "BEGIN", "BETWEEN", "BY", "CASCADE", "CASE", + "CAST", "CHECK", "COLLATE", "COLUMN", "COMMIT", "CONFLICT", "CONSTRAINT", "CREATE", + "CROSS", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DATABASE", "DEFAULT", + "DEFERRABLE", "DEFERRED", "DELETE", "DESC", "DETACH", "DISTINCT", "DROP", "EACH", + "ELSE", "END", "ESCAPE", "EXCEPT", "EXCLUSIVE", "EXISTS", "EXPLAIN", "FAIL", "FOR", + "FOREIGN", "FROM", "FULL", "GLOB", "GROUP", "HAVING", "IF", "IGNORE", "IMMEDIATE", + "IN", "INDEX", "INDEXED", "INITIALLY", "INNER", "INSERT", "INSTEAD", "INTERSECT", + "INTO", "IS", "ISNULL", "JOIN", "KEY", "LEFT", "LIKE", "LIMIT", "MATCH", "NATURAL", + "NO", "NOT", "NOTNULL", "NULL", "OF", "OFFSET", "ON", "OR", "ORDER", "OUTER", "PLAN", + "PRAGMA", "PRIMARY", "QUERY", "RAISE", "RECURSIVE", "REFERENCES", "REGEXP", "REINDEX", + "RELEASE", "RENAME", "REPLACE", "RESTRICT", "RIGHT", "ROLLBACK", "ROW", "SAVEPOINT", + "SELECT", "SET", "TABLE", "TEMP", "TEMPORARY", "THEN", "TO", "TRANSACTION", "TRIGGER", + "UNION", "UNIQUE", "UPDATE", "USING", "VACUUM", "VALUES", "VIEW", "VIRTUAL", "WHEN", + "WHERE", "WITH", "WITHOUT" + ] + return True + # if name.upper() in sqlite_keywords: # raise ValueError(f"'{name}' 是SQLite保留字,不建议使用,请更换名称") \ No newline at end of file diff --git a/src/utils/base/data_manager.py b/src/utils/base/data_manager.py index 5ff09269..51fa1aaa 100644 --- a/src/utils/base/data_manager.py +++ b/src/utils/base/data_manager.py @@ -1,99 +1,99 @@ -import os - -from pydantic import Field - -from .data import Database, LiteModel - -DATA_PATH = "data/liteyuki" -user_db: Database = Database(os.path.join(DATA_PATH, "users.ldb")) -group_db: Database = Database(os.path.join(DATA_PATH, "groups.ldb")) -plugin_db: Database = Database(os.path.join(DATA_PATH, "plugins.ldb")) -common_db: Database = Database(os.path.join(DATA_PATH, "common.ldb")) - -# 内存数据库,临时用于存储数据 -memory_database = { - -} - - -class User(LiteModel): - TABLE_NAME: str = "user" - user_id: str = Field(str(), alias="user_id") - username: str = Field(str(), alias="username") - profile: 
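# A minimal sketch of the plugin registry defined in data_manager.py, queried the same
# way np_main/loader.py does above; "some_plugin" is a placeholder module name.
from src.utils.base.data_manager import InstalledPlugin, plugin_db

plugin_db.save(InstalledPlugin(module_name="some_plugin", version="1.0.0"))
installed = plugin_db.where_all(InstalledPlugin(), default=[])
one = plugin_db.where_one(InstalledPlugin(), "module_name = ?", "some_plugin")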
dict[str, str] = Field(dict(), alias="profile") - enabled_plugins: list[str] = Field(list(), alias="enabled_plugins") - disabled_plugins: list[str] = Field(list(), alias="disabled_plugins") - - -class Group(LiteModel): - TABLE_NAME: str = "group_chat" - # Group是一个关键字,所以这里用GroupChat - group_id: str = Field(str(), alias="group_id") - group_name: str = Field(str(), alias="group_name") - enabled_plugins: list[str] = Field([], alias="enabled_plugins") - disabled_plugins: list[str] = Field([], alias="disabled_plugins") - enable: bool = Field(True, alias="enable") # 群聊全局机器人是否启用 - config: dict = Field({}, alias="config") - - -class InstalledPlugin(LiteModel): - TABLE_NAME: str = "installed_plugin" - module_name: str = Field(str(), alias="module_name") - version: str = Field(str(), alias="version") - - -class GlobalPlugin(LiteModel): - TABLE_NAME: str = "global_plugin" - liteyuki: bool = Field(True, alias="liteyuki") # 是否为LiteYuki插件 - module_name: str = Field(str(), alias="module_name") - enabled: bool = Field(True, alias="enabled") - - -class StoredConfig(LiteModel): - TABLE_NAME: str = "stored_config" - config: dict = {} - - -class TempConfig(LiteModel): - """储存临时键值对的表""" - TABLE_NAME: str = "temp_data" - data: dict = {} - - - -def auto_migrate(): - user_db.auto_migrate(User()) - group_db.auto_migrate(Group()) - plugin_db.auto_migrate(InstalledPlugin(), GlobalPlugin()) - common_db.auto_migrate(GlobalPlugin(), TempConfig()) - - -auto_migrate() - - -def set_memory_data(key: str, value) -> None: - """ - 设置内存数据库的数据,类似于redis - Args: - key: - value: - - Returns: - - """ - return memory_database.update({ - key: value - }) - - -def get_memory_data(key: str, default=None) -> any: - """ - 获取内存数据库的数据,类似于redis - Args: - key: - default: - - Returns: - - """ - return memory_database.get(key, default) +import os + +from pydantic import Field + +from .data import Database, LiteModel + +DATA_PATH = "data/liteyuki" +user_db: Database = Database(os.path.join(DATA_PATH, "users.ldb")) +group_db: Database = Database(os.path.join(DATA_PATH, "groups.ldb")) +plugin_db: Database = Database(os.path.join(DATA_PATH, "plugins.ldb")) +common_db: Database = Database(os.path.join(DATA_PATH, "common.ldb")) + +# 内存数据库,临时用于存储数据 +memory_database = { + +} + + +class User(LiteModel): + TABLE_NAME: str = "user" + user_id: str = Field(str(), alias="user_id") + username: str = Field(str(), alias="username") + profile: dict[str, str] = Field(dict(), alias="profile") + enabled_plugins: list[str] = Field(list(), alias="enabled_plugins") + disabled_plugins: list[str] = Field(list(), alias="disabled_plugins") + + +class Group(LiteModel): + TABLE_NAME: str = "group_chat" + # Group是一个关键字,所以这里用GroupChat + group_id: str = Field(str(), alias="group_id") + group_name: str = Field(str(), alias="group_name") + enabled_plugins: list[str] = Field([], alias="enabled_plugins") + disabled_plugins: list[str] = Field([], alias="disabled_plugins") + enable: bool = Field(True, alias="enable") # 群聊全局机器人是否启用 + config: dict = Field({}, alias="config") + + +class InstalledPlugin(LiteModel): + TABLE_NAME: str = "installed_plugin" + module_name: str = Field(str(), alias="module_name") + version: str = Field(str(), alias="version") + + +class GlobalPlugin(LiteModel): + TABLE_NAME: str = "global_plugin" + liteyuki: bool = Field(True, alias="liteyuki") # 是否为LiteYuki插件 + module_name: str = Field(str(), alias="module_name") + enabled: bool = Field(True, alias="enabled") + + +class StoredConfig(LiteModel): + TABLE_NAME: str = "stored_config" + config: dict = {} + + 
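# A tiny usage sketch for the in-memory key/value helpers in data_manager.py
# (set_memory_data/get_memory_data); "boot_time" is a hypothetical key used only
# for illustration.
import time

from src.utils.base.data_manager import get_memory_data, set_memory_data

set_memory_data("boot_time", time.time())
boot_time = get_memory_data("boot_time", default=0.0)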
+class TempConfig(LiteModel): + """储存临时键值对的表""" + TABLE_NAME: str = "temp_data" + data: dict = {} + + + +def auto_migrate(): + user_db.auto_migrate(User()) + group_db.auto_migrate(Group()) + plugin_db.auto_migrate(InstalledPlugin(), GlobalPlugin()) + common_db.auto_migrate(GlobalPlugin(), TempConfig()) + + +auto_migrate() + + +def set_memory_data(key: str, value) -> None: + """ + 设置内存数据库的数据,类似于redis + Args: + key: + value: + + Returns: + + """ + return memory_database.update({ + key: value + }) + + +def get_memory_data(key: str, default=None) -> any: + """ + 获取内存数据库的数据,类似于redis + Args: + key: + default: + + Returns: + + """ + return memory_database.get(key, default) diff --git a/src/utils/base/language.py b/src/utils/base/language.py index 962d9ad7..37418534 100644 --- a/src/utils/base/language.py +++ b/src/utils/base/language.py @@ -1,237 +1,237 @@ -""" -语言模块,添加对多语言的支持 -""" - -import json -import locale -import os -from typing import Any, overload - -import nonebot - -from .config import config, get_config -from .data_manager import User, user_db - -_language_data = { - "en": { - "name": "English", - } -} - -_user_lang = {"user_id": "zh-CN"} - - -def load_from_lang(file_path: str, lang_code: str = None): - """ - 从lang文件中加载语言数据,用于简单的文本键值对 - - Args: - file_path: lang文件路径 - lang_code: 语言代码,如果为None则从文件名中获取 - """ - try: - if lang_code is None: - lang_code = os.path.basename(file_path).split(".")[0] - with open(file_path, "r", encoding="utf-8") as file: - data = {} - for line in file: - line = line.strip() - if not line or line.startswith("#"): # 空行或注释 - continue - key, value = line.split("=", 1) - data[key.strip()] = value.strip() - if lang_code not in _language_data: - _language_data[lang_code] = {} - _language_data[lang_code].update(data) - nonebot.logger.debug(f"Loaded language data from {file_path}") - except Exception as e: - nonebot.logger.error(f"Failed to load language data from {file_path}: {e}") - - -def load_from_json(file_path: str, lang_code: str = None): - """ - 从json文件中加载语言数据,可以定义一些变量 - - Args: - lang_code: 语言代码,如果为None则从文件名中获取 - file_path: json文件路径 - """ - try: - if lang_code is None: - lang_code = os.path.basename(file_path).split(".")[0] - with open(file_path, "r", encoding="utf-8") as file: - data = json.load(file) - if lang_code not in _language_data: - _language_data[lang_code] = {} - _language_data[lang_code].update(data) - nonebot.logger.debug(f"Loaded language data from {file_path}") - except Exception as e: - nonebot.logger.error(f"Failed to load language data from {file_path}: {e}") - - -def load_from_dir(dir_path: str): - """ - 从目录中加载语言数据 - - Args: - dir_path: 目录路径 - """ - for file in os.listdir(dir_path): - try: - file_path = os.path.join(dir_path, file) - if os.path.isfile(file_path): - if file.endswith(".lang"): - load_from_lang(file_path) - elif file.endswith(".json"): - load_from_json(file_path) - except Exception as e: - nonebot.logger.error(f"Failed to load language data from {file}: {e}") - continue - - -def load_from_dict(data: dict, lang_code: str): - """ - 从字典中加载语言数据 - - Args: - lang_code: 语言代码 - data: 字典数据 - """ - if lang_code not in _language_data: - _language_data[lang_code] = {} - _language_data[lang_code].update(data) - - -class Language: - # 三重fallback - # 用户语言 > 默认语言/系统语言 > zh-CN - def __init__(self, lang_code: str = None, fallback_lang_code: str = None): - self.lang_code = lang_code - - if self.lang_code is None: - self.lang_code = get_default_lang_code() - - self.fallback_lang_code = fallback_lang_code - if self.fallback_lang_code is None: - 
self.fallback_lang_code = config.get( - "default_language", get_system_lang_code() - ) - - def _get(self, item: str, *args, **kwargs) -> str | Any: - """ - 获取当前语言文本,kwargs中的default参数为默认文本 - - **请不要重写本函数** - - Args: - item: 文本键 - *args: 格式化参数 - **kwargs: 格式化参数 - - Returns: - str: 当前语言的文本 - - """ - default = kwargs.pop("default", None) - fallback = (self.lang_code, self.fallback_lang_code, "zh-CN") - - for lang_code in fallback: - if lang_code in _language_data and item in _language_data[lang_code]: - trans: str = _language_data[lang_code][item] - try: - return trans.format(*args, **kwargs) - except Exception as e: - nonebot.logger.warning(f"Failed to format language data: {e}") - return trans - return default or item - - def get(self, item: str, *args, **kwargs) -> str | Any: - """ - 获取当前语言文本,kwargs中的default参数为默认文本 - Args: - item: 文本键 - *args: 格式化参数 - **kwargs: 格式化参数 - - Returns: - str: 当前语言的文本 - - """ - return self._get(item, *args, **kwargs) - - def get_many(self, *args: str, **kwargs) -> dict[str, str]: - """ - 获取多个文本 - Args: - *args: 文本键 - **kwargs: 文本键和默认文本 - - Returns: - dict: 多个文本 - """ - args_data = {item: self.get(item) for item in args} - kwargs_data = { - item: self.get(item, default=default) for item, default in kwargs.items() - } - args_data.update(kwargs_data) - return args_data - - -def change_user_lang(user_id: str, lang_code: str): - """ - 修改用户的语言,同时储存到数据库和内存中 - """ - user = user_db.where_one( - User(), "user_id = ?", user_id, default=User(user_id=user_id) - ) - user.profile["lang"] = lang_code - user_db.save(user) - _user_lang[user_id] = lang_code - - -def get_user_lang(user_id: str) -> Language: - """ - 获取用户的语言实例,优先从内存中获取 - """ - user_id = str(user_id) - - if user_id not in _user_lang: - nonebot.logger.debug(f"Loading user language for {user_id}") - user = user_db.where_one( - User(), - "user_id = ?", - user_id, - default=User(user_id=user_id, username="Unknown"), - ) - lang_code = user.profile.get("lang", get_default_lang_code()) - _user_lang[user_id] = lang_code - - return Language(_user_lang[user_id]) - - -def get_system_lang_code() -> str: - """ - 获取系统语言代码 - """ - return locale.getdefaultlocale()[0].replace("_", "-") - - -def get_default_lang_code() -> str: - """ - 获取默认语言代码,若没有设置则使用系统语言 - Returns: - - """ - return get_config("default_language", default=get_system_lang_code()) - - -def get_all_lang() -> dict[str, str]: - """ - 获取所有语言 - Returns - {'en': 'English'} - """ - d = {} - for key in _language_data: - d[key] = _language_data[key].get("language.name", key) - return d +""" +语言模块,添加对多语言的支持 +""" + +import json +import locale +import os +from typing import Any, overload + +import nonebot + +from .config import config, get_config +from .data_manager import User, user_db + +_language_data = { + "en": { + "name": "English", + } +} + +_user_lang = {"user_id": "zh-CN"} + + +def load_from_lang(file_path: str, lang_code: str = None): + """ + 从lang文件中加载语言数据,用于简单的文本键值对 + + Args: + file_path: lang文件路径 + lang_code: 语言代码,如果为None则从文件名中获取 + """ + try: + if lang_code is None: + lang_code = os.path.basename(file_path).split(".")[0] + with open(file_path, "r", encoding="utf-8") as file: + data = {} + for line in file: + line = line.strip() + if not line or line.startswith("#"): # 空行或注释 + continue + key, value = line.split("=", 1) + data[key.strip()] = value.strip() + if lang_code not in _language_data: + _language_data[lang_code] = {} + _language_data[lang_code].update(data) + nonebot.logger.debug(f"Loaded language data from {file_path}") + except Exception as e: + 
nonebot.logger.error(f"Failed to load language data from {file_path}: {e}") + + +def load_from_json(file_path: str, lang_code: str = None): + """ + 从json文件中加载语言数据,可以定义一些变量 + + Args: + lang_code: 语言代码,如果为None则从文件名中获取 + file_path: json文件路径 + """ + try: + if lang_code is None: + lang_code = os.path.basename(file_path).split(".")[0] + with open(file_path, "r", encoding="utf-8") as file: + data = json.load(file) + if lang_code not in _language_data: + _language_data[lang_code] = {} + _language_data[lang_code].update(data) + nonebot.logger.debug(f"Loaded language data from {file_path}") + except Exception as e: + nonebot.logger.error(f"Failed to load language data from {file_path}: {e}") + + +def load_from_dir(dir_path: str): + """ + 从目录中加载语言数据 + + Args: + dir_path: 目录路径 + """ + for file in os.listdir(dir_path): + try: + file_path = os.path.join(dir_path, file) + if os.path.isfile(file_path): + if file.endswith(".lang"): + load_from_lang(file_path) + elif file.endswith(".json"): + load_from_json(file_path) + except Exception as e: + nonebot.logger.error(f"Failed to load language data from {file}: {e}") + continue + + +def load_from_dict(data: dict, lang_code: str): + """ + 从字典中加载语言数据 + + Args: + lang_code: 语言代码 + data: 字典数据 + """ + if lang_code not in _language_data: + _language_data[lang_code] = {} + _language_data[lang_code].update(data) + + +class Language: + # 三重fallback + # 用户语言 > 默认语言/系统语言 > zh-CN + def __init__(self, lang_code: str = None, fallback_lang_code: str = None): + self.lang_code = lang_code + + if self.lang_code is None: + self.lang_code = get_default_lang_code() + + self.fallback_lang_code = fallback_lang_code + if self.fallback_lang_code is None: + self.fallback_lang_code = config.get( + "default_language", get_system_lang_code() + ) + + def _get(self, item: str, *args, **kwargs) -> str | Any: + """ + 获取当前语言文本,kwargs中的default参数为默认文本 + + **请不要重写本函数** + + Args: + item: 文本键 + *args: 格式化参数 + **kwargs: 格式化参数 + + Returns: + str: 当前语言的文本 + + """ + default = kwargs.pop("default", None) + fallback = (self.lang_code, self.fallback_lang_code, "zh-CN") + + for lang_code in fallback: + if lang_code in _language_data and item in _language_data[lang_code]: + trans: str = _language_data[lang_code][item] + try: + return trans.format(*args, **kwargs) + except Exception as e: + nonebot.logger.warning(f"Failed to format language data: {e}") + return trans + return default or item + + def get(self, item: str, *args, **kwargs) -> str | Any: + """ + 获取当前语言文本,kwargs中的default参数为默认文本 + Args: + item: 文本键 + *args: 格式化参数 + **kwargs: 格式化参数 + + Returns: + str: 当前语言的文本 + + """ + return self._get(item, *args, **kwargs) + + def get_many(self, *args: str, **kwargs) -> dict[str, str]: + """ + 获取多个文本 + Args: + *args: 文本键 + **kwargs: 文本键和默认文本 + + Returns: + dict: 多个文本 + """ + args_data = {item: self.get(item) for item in args} + kwargs_data = { + item: self.get(item, default=default) for item, default in kwargs.items() + } + args_data.update(kwargs_data) + return args_data + + +def change_user_lang(user_id: str, lang_code: str): + """ + 修改用户的语言,同时储存到数据库和内存中 + """ + user = user_db.where_one( + User(), "user_id = ?", user_id, default=User(user_id=user_id) + ) + user.profile["lang"] = lang_code + user_db.save(user) + _user_lang[user_id] = lang_code + + +def get_user_lang(user_id: str) -> Language: + """ + 获取用户的语言实例,优先从内存中获取 + """ + user_id = str(user_id) + + if user_id not in _user_lang: + nonebot.logger.debug(f"Loading user language for {user_id}") + user = user_db.where_one( + User(), + "user_id = ?", + user_id, + 
default=User(user_id=user_id, username="Unknown"), + ) + lang_code = user.profile.get("lang", get_default_lang_code()) + _user_lang[user_id] = lang_code + + return Language(_user_lang[user_id]) + + +def get_system_lang_code() -> str: + """ + 获取系统语言代码 + """ + return locale.getdefaultlocale()[0].replace("_", "-") + + +def get_default_lang_code() -> str: + """ + 获取默认语言代码,若没有设置则使用系统语言 + Returns: + + """ + return get_config("default_language", default=get_system_lang_code()) + + +def get_all_lang() -> dict[str, str]: + """ + 获取所有语言 + Returns + {'en': 'English'} + """ + d = {} + for key in _language_data: + d[key] = _language_data[key].get("language.name", key) + return d diff --git a/src/utils/base/log.py b/src/utils/base/log.py index 65362bf6..3e2b09ab 100644 --- a/src/utils/base/log.py +++ b/src/utils/base/log.py @@ -1,79 +1,79 @@ -import sys -import loguru -from typing import TYPE_CHECKING -from .config import load_from_yaml -from .language import Language, get_default_lang_code - -logger = loguru.logger -if TYPE_CHECKING: - # avoid sphinx autodoc resolve annotation failed - # because loguru module do not have `Logger` class actually - from loguru import Record - - -def default_filter(record: "Record"): - """默认的日志过滤器,根据 `config.log_level` 配置改变日志等级。""" - log_level = record["extra"].get("nonebot_log_level", "INFO") - levelno = logger.level(log_level).no if isinstance(log_level, str) else log_level - return record["level"].no >= levelno - - -# DEBUG日志格式 -debug_format: str = ( - "{time:YYYY-MM-DD HH:mm:ss} " - "[{level.icon}] " - "<{name}.{module}.{function}:{line}> " - "{message}" -) - -# 默认日志格式 -default_format: str = ( - "{time:MM-DD HH:mm:ss} " - "[{level.icon}] " - "<{name}> " - "{message}" -) - - -def get_format(level: str) -> str: - if level == "DEBUG": - return debug_format - else: - return default_format - - -logger = loguru.logger.bind() - - -def init_log(): - """ - 在语言加载完成后执行 - Returns: - - """ - global logger - - config = load_from_yaml("config.yml") - - logger.remove() - logger.add( - sys.stdout, - level=0, - diagnose=False, - filter=default_filter, - format=get_format(config.get("log_level", "INFO")), - ) - show_icon = config.get("log_icon", True) - lang = Language(get_default_lang_code()) - - debug = lang.get("log.debug", default="==DEBUG") - info = lang.get("log.info", default="===INFO") - success = lang.get("log.success", default="SUCCESS") - warning = lang.get("log.warning", default="WARNING") - error = lang.get("log.error", default="==ERROR") - - logger.level("DEBUG", color="", icon=f"{'🐛' if show_icon else ''}{debug}") - logger.level("INFO", color="", icon=f"{'ℹ️' if show_icon else ''}{info}") - logger.level("SUCCESS", color="", icon=f"{'✅' if show_icon else ''}{success}") - logger.level("WARNING", color="", icon=f"{'⚠️' if show_icon else ''}{warning}") - logger.level("ERROR", color="", icon=f"{'⭕' if show_icon else ''}{error}") +import sys +import loguru +from typing import TYPE_CHECKING +from .config import load_from_yaml +from .language import Language, get_default_lang_code + +logger = loguru.logger +if TYPE_CHECKING: + # avoid sphinx autodoc resolve annotation failed + # because loguru module do not have `Logger` class actually + from loguru import Record + + +def default_filter(record: "Record"): + """默认的日志过滤器,根据 `config.log_level` 配置改变日志等级。""" + log_level = record["extra"].get("nonebot_log_level", "INFO") + levelno = logger.level(log_level).no if isinstance(log_level, str) else log_level + return record["level"].no >= levelno + + +# DEBUG日志格式 +debug_format: str = ( + 
"{time:YYYY-MM-DD HH:mm:ss} " + "[{level.icon}] " + "<{name}.{module}.{function}:{line}> " + "{message}" +) + +# 默认日志格式 +default_format: str = ( + "{time:MM-DD HH:mm:ss} " + "[{level.icon}] " + "<{name}> " + "{message}" +) + + +def get_format(level: str) -> str: + if level == "DEBUG": + return debug_format + else: + return default_format + + +logger = loguru.logger.bind() + + +def init_log(): + """ + 在语言加载完成后执行 + Returns: + + """ + global logger + + config = load_from_yaml("config.yml") + + logger.remove() + logger.add( + sys.stdout, + level=0, + diagnose=False, + filter=default_filter, + format=get_format(config.get("log_level", "INFO")), + ) + show_icon = config.get("log_icon", True) + lang = Language(get_default_lang_code()) + + debug = lang.get("log.debug", default="==DEBUG") + info = lang.get("log.info", default="===INFO") + success = lang.get("log.success", default="SUCCESS") + warning = lang.get("log.warning", default="WARNING") + error = lang.get("log.error", default="==ERROR") + + logger.level("DEBUG", color="", icon=f"{'🐛' if show_icon else ''}{debug}") + logger.level("INFO", color="", icon=f"{'ℹ️' if show_icon else ''}{info}") + logger.level("SUCCESS", color="", icon=f"{'✅' if show_icon else ''}{success}") + logger.level("WARNING", color="", icon=f"{'⚠️' if show_icon else ''}{warning}") + logger.level("ERROR", color="", icon=f"{'⭕' if show_icon else ''}{error}") diff --git a/src/utils/base/ly_function.py b/src/utils/base/ly_function.py index f97a508c..73084615 100644 --- a/src/utils/base/ly_function.py +++ b/src/utils/base/ly_function.py @@ -1,197 +1,197 @@ -""" -liteyuki function是一种类似于mcfunction的函数,用于在liteyuki中实现一些功能,例如自定义指令等,也可与Python函数绑定 -使用 /function function_name *args **kwargs来调用 -例如 /function test/hello user_id=123456 -可以用于一些轻量级插件的编写,无需Python代码 -SnowyKami -""" -import asyncio -import functools -# cmd *args **kwargs -# api api_name **kwargs -import os -from typing import Any, Awaitable, Callable, Coroutine - -import nonebot -from nonebot import Bot -from nonebot.adapters.satori import bot -from nonebot.internal.matcher import Matcher - -ly_function_extensions = ( - "lyf", - "lyfunction", - "mcfunction" -) - -loaded_functions = dict() - - -class LiteyukiFunction: - def __init__(self, name: str): - self.name = name - self.functions: list[str] = list() - self.bot: Bot = None - self.kwargs_data = dict() - self.args_data = list() - self.matcher: Matcher = None - self.end = False - - self.sub_tasks: list[asyncio.Task] = list() - - async def __call__(self, *args, **kwargs): - self.kwargs_data.update(kwargs) - self.args_data = list(set(self.args_data + list(args))) - for i, cmd in enumerate(self.functions): - r = await self.execute_line(cmd, i, *args, **kwargs) - if r == 0: - msg = f"End function {self.name} by line {i}" - nonebot.logger.debug(msg) - for task in self.sub_tasks: - task.cancel(msg) - return - - def __str__(self): - return f"LiteyukiFunction({self.name})" - - def __repr__(self): - return self.__str__() - - async def execute_line(self, cmd: str, line: int = 0, *args, **kwargs) -> Any: - """ - 解析一行轻雪函数 - Args: - cmd: 命令 - line: 行数 - Returns: - """ - - try: - if "${" in cmd: - # 此种情况下,{}内容不用管,只对${}内的内容进行format - for i in range(len(cmd) - 1): - if cmd[i] == "$" and cmd[i + 1] == "{": - end = cmd.find("}", i) - key = cmd[i + 2:end] - cmd = cmd.replace(f"${{{key}}}", str(self.kwargs_data.get(key, ""))) - else: - cmd = cmd.format(*self.args_data, **self.kwargs_data) - except Exception as e: - pass - - no_head = cmd.split(" ", 1)[1] if len(cmd.split(" ")) > 1 else "" - try: 
- head, cmd_args, cmd_kwargs = self.get_args(cmd) - except Exception as e: - error_msg = f"Parsing error in {self.name} at line {line}: {e}" - nonebot.logger.error(error_msg) - await self.matcher.send(error_msg) - return - - if head == "var": - # 变量定义 - self.kwargs_data.update(cmd_kwargs) - - elif head == "cmd": - # 在当前计算机上执行命令 - os.system(no_head) - - elif head == "api": - # 调用Bot API 需要Bot实例 - await self.bot.call_api(cmd_args[1], **cmd_kwargs) - - elif head == "function": - # 调用轻雪函数 - func = get_function(cmd_args[1]) - func.bot = self.bot - func.matcher = self.matcher - await func(*cmd_args[2:], **cmd_kwargs) - - elif head == "sleep": - # 等待一段时间 - await asyncio.sleep(float(cmd_args[1])) - - elif head == "nohup": - # 挂起运行 - task = asyncio.create_task(self.execute_line(no_head)) - self.sub_tasks.append(task) - - elif head == "end": - # 结束所有函数 - self.end = True - return 0 - - - elif head == "await": - # 等待所有协程执行完毕 - await asyncio.gather(*self.sub_tasks) - - def get_args(self, line: str) -> tuple[str, tuple[str, ...], dict[str, Any]]: - """ - 获取参数 - Args: - line: 命令 - Returns: - 命令头 参数 关键字 - """ - line = line.replace("\\=", "EQUAL_SIGN") - head = "" - args = list() - kwargs = dict() - for i, arg in enumerate(line.split(" ")): - if "=" in arg: - key, value = arg.split("=", 1) - value = value.replace("EQUAL_SIGN", "=") - try: - value = eval(value) - except: - value = self.kwargs_data.get(value, value) - kwargs[key] = value - else: - if i == 0: - head = arg - args.append(arg) - return head, tuple(args), kwargs - - -def get_function(name: str) -> LiteyukiFunction | None: - """ - 获取一个轻雪函数 - Args: - name: 函数名 - Returns: - """ - return loaded_functions.get(name) - - -def load_from_dir(path: str): - """ - 从目录及其子目录中递归加载所有轻雪函数,类似mcfunction - - Args: - path: 目录路径 - """ - for f in os.listdir(path): - f = os.path.join(path, f) - if os.path.isfile(f): - if f.endswith(ly_function_extensions): - load_from_file(f) - if os.path.isdir(f): - load_from_dir(f) - - -def load_from_file(path: str): - """ - 从文件中加载轻雪函数 - Args: - path: - Returns: - """ - with open(path, "r", encoding="utf-8") as f: - name = ".".join(os.path.basename(path).split(".")[:-1]) - func = LiteyukiFunction(name) - for i, line in enumerate(f.read().split("\n")): - if line.startswith("#") or line.strip() == "": - continue - func.functions.append(line) - loaded_functions[name] = func - nonebot.logger.debug(f"Loaded function {name}") +""" +liteyuki function是一种类似于mcfunction的函数,用于在liteyuki中实现一些功能,例如自定义指令等,也可与Python函数绑定 +使用 /function function_name *args **kwargs来调用 +例如 /function test/hello user_id=123456 +可以用于一些轻量级插件的编写,无需Python代码 +SnowyKami +""" +import asyncio +import functools +# cmd *args **kwargs +# api api_name **kwargs +import os +from typing import Any, Awaitable, Callable, Coroutine + +import nonebot +from nonebot import Bot +from nonebot.adapters.satori import bot +from nonebot.internal.matcher import Matcher + +ly_function_extensions = ( + "lyf", + "lyfunction", + "mcfunction" +) + +loaded_functions = dict() + + +class LiteyukiFunction: + def __init__(self, name: str): + self.name = name + self.functions: list[str] = list() + self.bot: Bot = None + self.kwargs_data = dict() + self.args_data = list() + self.matcher: Matcher = None + self.end = False + + self.sub_tasks: list[asyncio.Task] = list() + + async def __call__(self, *args, **kwargs): + self.kwargs_data.update(kwargs) + self.args_data = list(set(self.args_data + list(args))) + for i, cmd in enumerate(self.functions): + r = await self.execute_line(cmd, i, *args, **kwargs) + if r == 0: 
+ msg = f"End function {self.name} by line {i}" + nonebot.logger.debug(msg) + for task in self.sub_tasks: + task.cancel(msg) + return + + def __str__(self): + return f"LiteyukiFunction({self.name})" + + def __repr__(self): + return self.__str__() + + async def execute_line(self, cmd: str, line: int = 0, *args, **kwargs) -> Any: + """ + 解析一行轻雪函数 + Args: + cmd: 命令 + line: 行数 + Returns: + """ + + try: + if "${" in cmd: + # 此种情况下,{}内容不用管,只对${}内的内容进行format + for i in range(len(cmd) - 1): + if cmd[i] == "$" and cmd[i + 1] == "{": + end = cmd.find("}", i) + key = cmd[i + 2:end] + cmd = cmd.replace(f"${{{key}}}", str(self.kwargs_data.get(key, ""))) + else: + cmd = cmd.format(*self.args_data, **self.kwargs_data) + except Exception as e: + pass + + no_head = cmd.split(" ", 1)[1] if len(cmd.split(" ")) > 1 else "" + try: + head, cmd_args, cmd_kwargs = self.get_args(cmd) + except Exception as e: + error_msg = f"Parsing error in {self.name} at line {line}: {e}" + nonebot.logger.error(error_msg) + await self.matcher.send(error_msg) + return + + if head == "var": + # 变量定义 + self.kwargs_data.update(cmd_kwargs) + + elif head == "cmd": + # 在当前计算机上执行命令 + os.system(no_head) + + elif head == "api": + # 调用Bot API 需要Bot实例 + await self.bot.call_api(cmd_args[1], **cmd_kwargs) + + elif head == "function": + # 调用轻雪函数 + func = get_function(cmd_args[1]) + func.bot = self.bot + func.matcher = self.matcher + await func(*cmd_args[2:], **cmd_kwargs) + + elif head == "sleep": + # 等待一段时间 + await asyncio.sleep(float(cmd_args[1])) + + elif head == "nohup": + # 挂起运行 + task = asyncio.create_task(self.execute_line(no_head)) + self.sub_tasks.append(task) + + elif head == "end": + # 结束所有函数 + self.end = True + return 0 + + + elif head == "await": + # 等待所有协程执行完毕 + await asyncio.gather(*self.sub_tasks) + + def get_args(self, line: str) -> tuple[str, tuple[str, ...], dict[str, Any]]: + """ + 获取参数 + Args: + line: 命令 + Returns: + 命令头 参数 关键字 + """ + line = line.replace("\\=", "EQUAL_SIGN") + head = "" + args = list() + kwargs = dict() + for i, arg in enumerate(line.split(" ")): + if "=" in arg: + key, value = arg.split("=", 1) + value = value.replace("EQUAL_SIGN", "=") + try: + value = eval(value) + except: + value = self.kwargs_data.get(value, value) + kwargs[key] = value + else: + if i == 0: + head = arg + args.append(arg) + return head, tuple(args), kwargs + + +def get_function(name: str) -> LiteyukiFunction | None: + """ + 获取一个轻雪函数 + Args: + name: 函数名 + Returns: + """ + return loaded_functions.get(name) + + +def load_from_dir(path: str): + """ + 从目录及其子目录中递归加载所有轻雪函数,类似mcfunction + + Args: + path: 目录路径 + """ + for f in os.listdir(path): + f = os.path.join(path, f) + if os.path.isfile(f): + if f.endswith(ly_function_extensions): + load_from_file(f) + if os.path.isdir(f): + load_from_dir(f) + + +def load_from_file(path: str): + """ + 从文件中加载轻雪函数 + Args: + path: + Returns: + """ + with open(path, "r", encoding="utf-8") as f: + name = ".".join(os.path.basename(path).split(".")[:-1]) + func = LiteyukiFunction(name) + for i, line in enumerate(f.read().split("\n")): + if line.startswith("#") or line.strip() == "": + continue + func.functions.append(line) + loaded_functions[name] = func + nonebot.logger.debug(f"Loaded function {name}") diff --git a/src/utils/base/ly_typing.py b/src/utils/base/ly_typing.py index bbda42e4..31242e18 100644 --- a/src/utils/base/ly_typing.py +++ b/src/utils/base/ly_typing.py @@ -1,8 +1,8 @@ -from nonebot.adapters.onebot import v11, v12 -from nonebot.adapters import satori - -T_Bot = v11.Bot | v12.Bot | satori.Bot 
-T_GroupMessageEvent = v11.GroupMessageEvent | v12.GroupMessageEvent -T_PrivateMessageEvent = v11.PrivateMessageEvent | v12.PrivateMessageEvent -T_MessageEvent = v11.MessageEvent | v12.MessageEvent | satori.MessageEvent -T_Message = v11.Message | v12.Message | satori.Message +from nonebot.adapters.onebot import v11, v12 +from nonebot.adapters import satori + +T_Bot = v11.Bot | v12.Bot | satori.Bot +T_GroupMessageEvent = v11.GroupMessageEvent | v12.GroupMessageEvent +T_PrivateMessageEvent = v11.PrivateMessageEvent | v12.PrivateMessageEvent +T_MessageEvent = v11.MessageEvent | v12.MessageEvent | satori.MessageEvent +T_Message = v11.Message | v12.Message | satori.Message diff --git a/src/utils/base/permission.py b/src/utils/base/permission.py index 8626ac3e..581e88c9 100644 --- a/src/utils/base/permission.py +++ b/src/utils/base/permission.py @@ -1,5 +1,5 @@ -from nonebot.adapters.onebot import v11 - -GROUP_ADMIN = v11.GROUP_ADMIN -GROUP_OWNER = v11.GROUP_OWNER - +from nonebot.adapters.onebot import v11 + +GROUP_ADMIN = v11.GROUP_ADMIN +GROUP_OWNER = v11.GROUP_OWNER + diff --git a/src/utils/base/resource.py b/src/utils/base/resource.py index 2342e275..b9d76932 100644 --- a/src/utils/base/resource.py +++ b/src/utils/base/resource.py @@ -1,355 +1,355 @@ -import json -import os -import shutil -import zipfile -from typing import Any -from pathlib import Path - -import aiofiles -import nonebot -import yaml - -from .data import LiteModel -from .language import Language, get_default_lang_code -from .ly_function import loaded_functions - -_loaded_resource_packs: list["ResourceMetadata"] = [] # 按照加载顺序排序 -temp_resource_root = Path("data/liteyuki/resources") -temp_extract_root = Path("data/liteyuki/temp") -lang = Language(get_default_lang_code()) - - - - -class ResourceMetadata(LiteModel): - name: str = "Unknown" - version: str = "0.0.1" - description: str = "Unknown" - path: str = "" - folder: str = "" - - -def load_resource_from_dir(path: str): - """ - 把资源包按照文件相对路径复制到运行临时文件夹data/liteyuki/resources - Args: - path: 资源文件夹 - Returns: - """ - if os.path.exists(os.path.join(path, "metadata.yml")): - with open(os.path.join(path, "metadata.yml"), "r", encoding="utf-8") as f: - metadata = yaml.safe_load(f) - elif os.path.isfile(path) and path.endswith(".zip"): - # zip文件 - # 临时解压并读取metadata.yml - with zipfile.ZipFile(path, "r") as zip_ref: - # 解压至临时目录 data/liteyuki/temp/{pack_name}.zip - zip_ref.extractall(os.path.join(temp_extract_root, os.path.basename(path))) - with zip_ref.open("metadata.yml") as f: - metadata = yaml.safe_load(f) - path = os.path.join(temp_extract_root, os.path.basename(path)) - else: - # 没有metadata.yml文件,不是一个资源包 - return - for root, dirs, files in os.walk(path): - for file in files: - relative_path = os.path.relpath(os.path.join(root, file), path) - copy_file( - os.path.join(root, file), - os.path.join(temp_resource_root, relative_path), - ) - metadata["path"] = path - metadata["folder"] = os.path.basename(path) - - if os.path.exists(os.path.join(path, "lang")): - # 加载语言 - from src.utils.base.language import load_from_dir - - load_from_dir(os.path.join(path, "lang")) - - if os.path.exists(os.path.join(path, "functions")): - # 加载功能 - from src.utils.base.ly_function import load_from_dir - - load_from_dir(os.path.join(path, "functions")) - - if os.path.exists(os.path.join(path, "word_bank")): - # 加载词库 - from src.utils.base.word_bank import load_from_dir - - load_from_dir(os.path.join(path, "word_bank")) - - _loaded_resource_packs.insert(0, ResourceMetadata(**metadata)) - - -def get_path( - 
path: os.PathLike[str,] | Path | str, - abs_path: bool = True, - default: Any = None, - debug: bool = False, -) -> str | Any: - """ - 获取资源包中的路径,且该路径必须存在 - Args: - path: 相对路径 - abs_path: 是否返回绝对路径 - default: 默认解,当该路径不存在时使用 - debug: 启用调试,每次都会先重载资源 - Returns: 所需求之路径 - """ - if debug: - nonebot.logger.debug("Resource path debug enabled, reloading") - load_resources() - resource_relative_path = temp_resource_root / path - if resource_relative_path.exists(): - return str( - resource_relative_path.resolve() if abs_path else resource_relative_path - ) - else: - return default - - -def get_resource_path( - path: os.PathLike[str,] | Path | str, - abs_path: bool = True, - only_exist: bool = False, - default: Any = None, - debug: bool = False, -) -> Path: - """ - 获取资源包中的路径 - Args: - path: 相对路径 - abs_path: 是否返回绝对路径 - only_exist: 检查该路径是否存在 - default: [当 `only_exist` 为 **真** 时启用]默认解,当该路径不存在时使用 - debug: 启用调试,每次都会先重载资源 - Returns: 所需求之路径 - """ - if debug: - nonebot.logger.debug("Resource path debug enabled, reloading") - load_resources() - resource_relative_path = ( - (temp_resource_root / path).resolve() - if abs_path - else (temp_resource_root / path) - ) - if only_exist: - if resource_relative_path.exists(): - return resource_relative_path - else: - return default - else: - return resource_relative_path - - -def get_files( - path: os.PathLike[str,] | Path | str, abs_path: bool = False -) -> list[str]: - """ - 获取资源包中一个目录的所有内容 - Args: - path: 该目录的相对路径 - abs_path: 是否返回绝对路径 - Returns: 目录内容路径所构成之列表 - """ - resource_relative_path = temp_resource_root / path - if resource_relative_path.exists(): - return [ - ( - str((resource_relative_path / file_).resolve()) - if abs_path - else str((resource_relative_path / file_)) - ) - for file_ in os.listdir(resource_relative_path) - ] - else: - return [] - - -def get_resource_files( - path: os.PathLike[str,] | Path | str, abs_path: bool = False -) -> list[Path]: - """ - 获取资源包中一个目录的所有内容 - Args: - path: 该目录的相对路径 - abs_path: 是否返回绝对路径 - Returns: 目录内容路径所构成之列表 - """ - resource_relative_path = temp_resource_root / path - if resource_relative_path.exists(): - return [ - ( - (resource_relative_path / file_).resolve() - if abs_path - else (resource_relative_path / file_) - ) - for file_ in os.listdir(resource_relative_path) - ] - else: - return [] - - -def get_loaded_resource_packs() -> list[ResourceMetadata]: - """ - 获取已加载的资源包,优先级从前到后 - Returns: 资源包列表 - """ - return _loaded_resource_packs - - -def copy_file(src, dst): - # 获取目标文件的目录 - dst_dir = os.path.dirname(dst) - # 如果目标目录不存在,创建它 - if not os.path.exists(dst_dir): - os.makedirs(dst_dir) - # 复制文件 - shutil.copy(src, dst) - - -def load_resources(): - """用于外部主程序调用的资源加载函数 - Returns: - """ - # 加载默认资源和语言 - # 清空临时资源包路径data/liteyuki/resources - _loaded_resource_packs.clear() - loaded_functions.clear() - if os.path.exists(temp_resource_root): - shutil.rmtree(temp_resource_root) - os.makedirs(temp_resource_root, exist_ok=True) - - # 加载内置资源 - standard_resources_path = "src/resources" - for resource_dir in os.listdir(standard_resources_path): - load_resource_from_dir(os.path.join(standard_resources_path, resource_dir)) - - # 加载其他资源包 - if not os.path.exists("resources"): - os.makedirs("resources", exist_ok=True) - - if not os.path.exists("resources/index.json"): - json.dump([], open("resources/index.json", "w", encoding="utf-8")) - - resource_index: list[str] = json.load( - open("resources/index.json", "r", encoding="utf-8") - ) - resource_index.reverse() # 优先级高的后加载,但是排在前面 - for resource in resource_index: - 
load_resource_from_dir(os.path.join("resources", resource)) - - -def check_status(name: str) -> bool: - """ - 检查资源包是否已加载 - Args: - name: 资源包名称,文件夹名 - Returns: 是否已加载 - """ - return name in [rp.folder for rp in get_loaded_resource_packs()] - - -def check_exist(name: str) -> bool: - """ - 检查资源包文件夹是否存在于resources文件夹 - Args: - name: 资源包名称,文件夹名 - Returns: 是否存在 - """ - path = os.path.join("resources", name) - return os.path.exists(os.path.join(path, "metadata.yml")) or ( - os.path.isfile(path) and name.endswith(".zip") - ) - - -def add_resource_pack(name: str) -> bool: - """ - 添加资源包,该操作仅修改index.json文件,不会加载资源包,要生效请重载资源 - Args: - name: 资源包名称,文件夹名 - Returns: - """ - if check_exist(name): - old_index: list[str] = json.load( - open("resources/index.json", "r", encoding="utf-8") - ) - if name not in old_index: - old_index.append(name) - json.dump(old_index, open("resources/index.json", "w", encoding="utf-8")) - load_resource_from_dir(os.path.join("resources", name)) - return True - else: - nonebot.logger.warning(lang.get("liteyuki.resource_loaded", name=name)) - return False - else: - nonebot.logger.warning(lang.get("liteyuki.resource_not_exist", name=name)) - return False - - -def remove_resource_pack(name: str) -> bool: - """ - 移除资源包,该操作仅修改加载索引,要生效请重载资源 - Args: - name: 资源包名称,文件夹名 - Returns: - """ - if check_exist(name): - old_index: list[str] = json.load( - open("resources/index.json", "r", encoding="utf-8") - ) - if name in old_index: - old_index.remove(name) - json.dump(old_index, open("resources/index.json", "w", encoding="utf-8")) - return True - else: - nonebot.logger.warning(lang.get("liteyuki.resource_not_loaded", name=name)) - return False - else: - nonebot.logger.warning(lang.get("liteyuki.resource_not_exist", name=name)) - return False - - -def change_priority(name: str, delta: int) -> bool: - """ - 修改资源包优先级 - Args: - name: 资源包名称,文件夹名 - delta: 优先级变化,正数表示后移,负数表示前移,0表示移到最前 - Returns: - """ - # 正数表示前移,负数表示后移 - old_resource_list: list[str] = json.load( - open("resources/index.json", "r", encoding="utf-8") - ) - new_resource_list = old_resource_list.copy() - if name in old_resource_list: - index = old_resource_list.index(name) - if 0 <= index + delta < len(old_resource_list): - new_index = index + delta - new_resource_list.remove(name) - new_resource_list.insert(new_index, name) - json.dump( - new_resource_list, open("resources/index.json", "w", encoding="utf-8") - ) - return True - else: - nonebot.logger.warning("Priority change failed, out of range") - return False - else: - nonebot.logger.debug("Priority change failed, resource not loaded") - return False - - -def get_resource_metadata(name: str) -> ResourceMetadata: - """ - 获取资源包元数据 - Args: - name: 资源包名称,文件夹名 - Returns: - """ - for rp in get_loaded_resource_packs(): - if rp.folder == name: - return rp - return ResourceMetadata() +import json +import os +import shutil +import zipfile +from typing import Any +from pathlib import Path + +import aiofiles +import nonebot +import yaml + +from .data import LiteModel +from .language import Language, get_default_lang_code +from .ly_function import loaded_functions + +_loaded_resource_packs: list["ResourceMetadata"] = [] # 按照加载顺序排序 +temp_resource_root = Path("data/liteyuki/resources") +temp_extract_root = Path("data/liteyuki/temp") +lang = Language(get_default_lang_code()) + + + + +class ResourceMetadata(LiteModel): + name: str = "Unknown" + version: str = "0.0.1" + description: str = "Unknown" + path: str = "" + folder: str = "" + + +def load_resource_from_dir(path: str): + """ + 
把资源包按照文件相对路径复制到运行临时文件夹data/liteyuki/resources + Args: + path: 资源文件夹 + Returns: + """ + if os.path.exists(os.path.join(path, "metadata.yml")): + with open(os.path.join(path, "metadata.yml"), "r", encoding="utf-8") as f: + metadata = yaml.safe_load(f) + elif os.path.isfile(path) and path.endswith(".zip"): + # zip文件 + # 临时解压并读取metadata.yml + with zipfile.ZipFile(path, "r") as zip_ref: + # 解压至临时目录 data/liteyuki/temp/{pack_name}.zip + zip_ref.extractall(os.path.join(temp_extract_root, os.path.basename(path))) + with zip_ref.open("metadata.yml") as f: + metadata = yaml.safe_load(f) + path = os.path.join(temp_extract_root, os.path.basename(path)) + else: + # 没有metadata.yml文件,不是一个资源包 + return + for root, dirs, files in os.walk(path): + for file in files: + relative_path = os.path.relpath(os.path.join(root, file), path) + copy_file( + os.path.join(root, file), + os.path.join(temp_resource_root, relative_path), + ) + metadata["path"] = path + metadata["folder"] = os.path.basename(path) + + if os.path.exists(os.path.join(path, "lang")): + # 加载语言 + from src.utils.base.language import load_from_dir + + load_from_dir(os.path.join(path, "lang")) + + if os.path.exists(os.path.join(path, "functions")): + # 加载功能 + from src.utils.base.ly_function import load_from_dir + + load_from_dir(os.path.join(path, "functions")) + + if os.path.exists(os.path.join(path, "word_bank")): + # 加载词库 + from src.utils.base.word_bank import load_from_dir + + load_from_dir(os.path.join(path, "word_bank")) + + _loaded_resource_packs.insert(0, ResourceMetadata(**metadata)) + + +def get_path( + path: os.PathLike[str,] | Path | str, + abs_path: bool = True, + default: Any = None, + debug: bool = False, +) -> str | Any: + """ + 获取资源包中的路径,且该路径必须存在 + Args: + path: 相对路径 + abs_path: 是否返回绝对路径 + default: 默认解,当该路径不存在时使用 + debug: 启用调试,每次都会先重载资源 + Returns: 所需求之路径 + """ + if debug: + nonebot.logger.debug("Resource path debug enabled, reloading") + load_resources() + resource_relative_path = temp_resource_root / path + if resource_relative_path.exists(): + return str( + resource_relative_path.resolve() if abs_path else resource_relative_path + ) + else: + return default + + +def get_resource_path( + path: os.PathLike[str,] | Path | str, + abs_path: bool = True, + only_exist: bool = False, + default: Any = None, + debug: bool = False, +) -> Path: + """ + 获取资源包中的路径 + Args: + path: 相对路径 + abs_path: 是否返回绝对路径 + only_exist: 检查该路径是否存在 + default: [当 `only_exist` 为 **真** 时启用]默认解,当该路径不存在时使用 + debug: 启用调试,每次都会先重载资源 + Returns: 所需求之路径 + """ + if debug: + nonebot.logger.debug("Resource path debug enabled, reloading") + load_resources() + resource_relative_path = ( + (temp_resource_root / path).resolve() + if abs_path + else (temp_resource_root / path) + ) + if only_exist: + if resource_relative_path.exists(): + return resource_relative_path + else: + return default + else: + return resource_relative_path + + +def get_files( + path: os.PathLike[str,] | Path | str, abs_path: bool = False +) -> list[str]: + """ + 获取资源包中一个目录的所有内容 + Args: + path: 该目录的相对路径 + abs_path: 是否返回绝对路径 + Returns: 目录内容路径所构成之列表 + """ + resource_relative_path = temp_resource_root / path + if resource_relative_path.exists(): + return [ + ( + str((resource_relative_path / file_).resolve()) + if abs_path + else str((resource_relative_path / file_)) + ) + for file_ in os.listdir(resource_relative_path) + ] + else: + return [] + + +def get_resource_files( + path: os.PathLike[str,] | Path | str, abs_path: bool = False +) -> list[Path]: + """ + 获取资源包中一个目录的所有内容 + Args: + path: 该目录的相对路径 + abs_path: 
是否返回绝对路径 + Returns: 目录内容路径所构成之列表 + """ + resource_relative_path = temp_resource_root / path + if resource_relative_path.exists(): + return [ + ( + (resource_relative_path / file_).resolve() + if abs_path + else (resource_relative_path / file_) + ) + for file_ in os.listdir(resource_relative_path) + ] + else: + return [] + + +def get_loaded_resource_packs() -> list[ResourceMetadata]: + """ + 获取已加载的资源包,优先级从前到后 + Returns: 资源包列表 + """ + return _loaded_resource_packs + + +def copy_file(src, dst): + # 获取目标文件的目录 + dst_dir = os.path.dirname(dst) + # 如果目标目录不存在,创建它 + if not os.path.exists(dst_dir): + os.makedirs(dst_dir) + # 复制文件 + shutil.copy(src, dst) + + +def load_resources(): + """用于外部主程序调用的资源加载函数 + Returns: + """ + # 加载默认资源和语言 + # 清空临时资源包路径data/liteyuki/resources + _loaded_resource_packs.clear() + loaded_functions.clear() + if os.path.exists(temp_resource_root): + shutil.rmtree(temp_resource_root) + os.makedirs(temp_resource_root, exist_ok=True) + + # 加载内置资源 + standard_resources_path = "src/resources" + for resource_dir in os.listdir(standard_resources_path): + load_resource_from_dir(os.path.join(standard_resources_path, resource_dir)) + + # 加载其他资源包 + if not os.path.exists("resources"): + os.makedirs("resources", exist_ok=True) + + if not os.path.exists("resources/index.json"): + json.dump([], open("resources/index.json", "w", encoding="utf-8")) + + resource_index: list[str] = json.load( + open("resources/index.json", "r", encoding="utf-8") + ) + resource_index.reverse() # 优先级高的后加载,但是排在前面 + for resource in resource_index: + load_resource_from_dir(os.path.join("resources", resource)) + + +def check_status(name: str) -> bool: + """ + 检查资源包是否已加载 + Args: + name: 资源包名称,文件夹名 + Returns: 是否已加载 + """ + return name in [rp.folder for rp in get_loaded_resource_packs()] + + +def check_exist(name: str) -> bool: + """ + 检查资源包文件夹是否存在于resources文件夹 + Args: + name: 资源包名称,文件夹名 + Returns: 是否存在 + """ + path = os.path.join("resources", name) + return os.path.exists(os.path.join(path, "metadata.yml")) or ( + os.path.isfile(path) and name.endswith(".zip") + ) + + +def add_resource_pack(name: str) -> bool: + """ + 添加资源包,该操作仅修改index.json文件,不会加载资源包,要生效请重载资源 + Args: + name: 资源包名称,文件夹名 + Returns: + """ + if check_exist(name): + old_index: list[str] = json.load( + open("resources/index.json", "r", encoding="utf-8") + ) + if name not in old_index: + old_index.append(name) + json.dump(old_index, open("resources/index.json", "w", encoding="utf-8")) + load_resource_from_dir(os.path.join("resources", name)) + return True + else: + nonebot.logger.warning(lang.get("liteyuki.resource_loaded", name=name)) + return False + else: + nonebot.logger.warning(lang.get("liteyuki.resource_not_exist", name=name)) + return False + + +def remove_resource_pack(name: str) -> bool: + """ + 移除资源包,该操作仅修改加载索引,要生效请重载资源 + Args: + name: 资源包名称,文件夹名 + Returns: + """ + if check_exist(name): + old_index: list[str] = json.load( + open("resources/index.json", "r", encoding="utf-8") + ) + if name in old_index: + old_index.remove(name) + json.dump(old_index, open("resources/index.json", "w", encoding="utf-8")) + return True + else: + nonebot.logger.warning(lang.get("liteyuki.resource_not_loaded", name=name)) + return False + else: + nonebot.logger.warning(lang.get("liteyuki.resource_not_exist", name=name)) + return False + + +def change_priority(name: str, delta: int) -> bool: + """ + 修改资源包优先级 + Args: + name: 资源包名称,文件夹名 + delta: 优先级变化,正数表示后移,负数表示前移,0表示移到最前 + Returns: + """ + # 正数表示前移,负数表示后移 + old_resource_list: list[str] = json.load( + 
open("resources/index.json", "r", encoding="utf-8") + ) + new_resource_list = old_resource_list.copy() + if name in old_resource_list: + index = old_resource_list.index(name) + if 0 <= index + delta < len(old_resource_list): + new_index = index + delta + new_resource_list.remove(name) + new_resource_list.insert(new_index, name) + json.dump( + new_resource_list, open("resources/index.json", "w", encoding="utf-8") + ) + return True + else: + nonebot.logger.warning("Priority change failed, out of range") + return False + else: + nonebot.logger.debug("Priority change failed, resource not loaded") + return False + + +def get_resource_metadata(name: str) -> ResourceMetadata: + """ + 获取资源包元数据 + Args: + name: 资源包名称,文件夹名 + Returns: + """ + for rp in get_loaded_resource_packs(): + if rp.folder == name: + return rp + return ResourceMetadata() diff --git a/src/utils/base/word_bank.py b/src/utils/base/word_bank.py index 81772dcf..43ad4779 100644 --- a/src/utils/base/word_bank.py +++ b/src/utils/base/word_bank.py @@ -1,57 +1,57 @@ -import json -import os -import random -from typing import Iterable - -import nonebot - -word_bank: dict[str, set[str]] = {} - - -def load_from_file(file_path: str): - """ - 从json文件中加载词库 - - Args: - file_path: 文件路径 - """ - with open(file_path, "r", encoding="utf-8") as file: - data = json.load(file) - for key, value_list in data.items(): - if key not in word_bank: - word_bank[key] = set() - word_bank[key].update(value_list) - - nonebot.logger.debug(f"Loaded word bank from {file_path}") - - -def load_from_dir(dir_path: str): - """ - 从目录中加载词库 - - Args: - dir_path: 目录路径 - """ - for file in os.listdir(dir_path): - try: - file_path = os.path.join(dir_path, file) - if os.path.isfile(file_path): - if file.endswith(".json"): - load_from_file(file_path) - except Exception as e: - nonebot.logger.error(f"Failed to load language data from {file}: {e}") - continue - - -def get_reply(kws: Iterable[str]) -> str | None: - """ - 获取回复 - Args: - kws: 关键词 - Returns: - """ - for kw in kws: - if kw in word_bank: - return random.choice(list(word_bank[kw])) - - return None +import json +import os +import random +from typing import Iterable + +import nonebot + +word_bank: dict[str, set[str]] = {} + + +def load_from_file(file_path: str): + """ + 从json文件中加载词库 + + Args: + file_path: 文件路径 + """ + with open(file_path, "r", encoding="utf-8") as file: + data = json.load(file) + for key, value_list in data.items(): + if key not in word_bank: + word_bank[key] = set() + word_bank[key].update(value_list) + + nonebot.logger.debug(f"Loaded word bank from {file_path}") + + +def load_from_dir(dir_path: str): + """ + 从目录中加载词库 + + Args: + dir_path: 目录路径 + """ + for file in os.listdir(dir_path): + try: + file_path = os.path.join(dir_path, file) + if os.path.isfile(file_path): + if file.endswith(".json"): + load_from_file(file_path) + except Exception as e: + nonebot.logger.error(f"Failed to load language data from {file}: {e}") + continue + + +def get_reply(kws: Iterable[str]) -> str | None: + """ + 获取回复 + Args: + kws: 关键词 + Returns: + """ + for kw in kws: + if kw in word_bank: + return random.choice(list(word_bank[kw])) + + return None diff --git a/src/utils/event/__init__.py b/src/utils/event/__init__.py index 766f64be..b028cac6 100644 --- a/src/utils/event/__init__.py +++ b/src/utils/event/__init__.py @@ -1 +1 @@ -from .get_info import * +from .get_info import * diff --git a/src/utils/event/get_info.py b/src/utils/event/get_info.py index d2d3607f..b5e3b2c3 100644 --- a/src/utils/event/get_info.py +++ 
b/src/utils/event/get_info.py @@ -1,26 +1,26 @@ -from nonebot.adapters import satori -from nonebot.adapters import onebot -from src.utils.base.ly_typing import T_MessageEvent, T_GroupMessageEvent - - -def get_user_id(event: T_MessageEvent): - if isinstance(event, satori.event.Event): - return event.user.id - else: - return event.user_id - - -def get_group_id(event: T_GroupMessageEvent): - if isinstance(event, satori.event.Event): - return event.guild.id - elif isinstance(event, onebot.v11.GroupMessageEvent): - return event.group_id - else: - return None - - -def get_message_type(event: T_MessageEvent) -> str: - if isinstance(event, satori.event.Event): - return "private" if event.guild is None else "group" - else: - return event.message_type +from nonebot.adapters import satori +from nonebot.adapters import onebot +from src.utils.base.ly_typing import T_MessageEvent, T_GroupMessageEvent + + +def get_user_id(event: T_MessageEvent): + if isinstance(event, satori.event.Event): + return event.user.id + else: + return event.user_id + + +def get_group_id(event: T_GroupMessageEvent): + if isinstance(event, satori.event.Event): + return event.guild.id + elif isinstance(event, onebot.v11.GroupMessageEvent): + return event.group_id + else: + return None + + +def get_message_type(event: T_MessageEvent) -> str: + if isinstance(event, satori.event.Event): + return "private" if event.guild is None else "group" + else: + return event.message_type diff --git a/src/utils/external/logo.py b/src/utils/external/logo.py index c714d962..96d9192e 100644 --- a/src/utils/external/logo.py +++ b/src/utils/external/logo.py @@ -1,40 +1,40 @@ -async def get_user_icon(platform: str, user_id: str) -> str: - """ - 获取用户头像 - Args: - platform: qq, telegram, discord... - user_id: 1234567890 - - Returns: - str: 头像链接 - """ - match platform: - case "qq": - return f"http://q1.qlogo.cn/g?b=qq&nk={user_id}&s=640" - case "telegram": - return f"https://t.me/i/userpic/320/{user_id}.jpg" - case "discord": - return f"https://cdn.discordapp.com/avatars/{user_id}/" - case _: - return "" - - -async def get_group_icon(platform: str, group_id: str) -> str: - """ - 获取群组头像 - Args: - platform: qq, telegram, discord... - group_id: 1234567890 - - Returns: - str: 头像链接 - """ - match platform: - case "qq": - return f"http://p.qlogo.cn/gh/{group_id}/{group_id}/640" - case "telegram": - return f"https://t.me/c/{group_id}/" - case "discord": - return f"https://cdn.discordapp.com/icons/{group_id}/" - case _: - return "" +async def get_user_icon(platform: str, user_id: str) -> str: + """ + 获取用户头像 + Args: + platform: qq, telegram, discord... + user_id: 1234567890 + + Returns: + str: 头像链接 + """ + match platform: + case "qq": + return f"http://q1.qlogo.cn/g?b=qq&nk={user_id}&s=640" + case "telegram": + return f"https://t.me/i/userpic/320/{user_id}.jpg" + case "discord": + return f"https://cdn.discordapp.com/avatars/{user_id}/" + case _: + return "" + + +async def get_group_icon(platform: str, group_id: str) -> str: + """ + 获取群组头像 + Args: + platform: qq, telegram, discord... 
+ group_id: 1234567890 + + Returns: + str: 头像链接 + """ + match platform: + case "qq": + return f"http://p.qlogo.cn/gh/{group_id}/{group_id}/640" + case "telegram": + return f"https://t.me/c/{group_id}/" + case "discord": + return f"https://cdn.discordapp.com/icons/{group_id}/" + case _: + return "" diff --git a/src/utils/message/html_tool.py b/src/utils/message/html_tool.py index bd51852f..0fd8100c 100644 --- a/src/utils/message/html_tool.py +++ b/src/utils/message/html_tool.py @@ -1,89 +1,89 @@ -import os -import aiofiles # type: ignore -import nonebot -from nonebot import require - -# require("nonebot_plugin_htmlrender") - -from nonebot_plugin_htmlrender import ( # type: ignore - template_to_html, - template_to_pic, - md_to_pic -) # type: ignore - - -async def template2html( - template: str, - templates: dict, -) -> str: - """ - Args: - template: str: 模板文件 - **templates: dict: 模板参数 - Returns: - HTML 正文 - """ - template_path = os.path.dirname(template) - template_name = os.path.basename(template) - return await template_to_html(template_path, template_name, **templates) - - -async def template2image( - template: str, - templates: dict, - pages=None, - wait: int = 0, - scale_factor: float = 1, - debug: bool = False, -) -> bytes: - """ - template -> html -> image - Args: - debug: 输入渲染好的 html - wait: 等待时间,单位秒 - pages: 页面参数 - template: str: 模板文件 - templates: dict: 模板参数 - scale_factor: 缩放因子,越高越清晰 - Returns: - 图片二进制数据 - """ - - ### - if pages is None: - pages = { - "viewport": { - "width" : 1080, - "height": 10 - }, - } - - template_path = os.path.dirname(template) - template_name = os.path.basename(template) - - if debug: - # 重载资源 - raw_html = await template_to_html( - template_name=template_name, - template_path=template_path, - **templates, - ) - random_file_name = f"debug.html" - async with aiofiles.open( - os.path.join(template_path, random_file_name), "w", encoding="utf-8" - ) as f: - await f.write(raw_html) - nonebot.logger.info("Debug HTML: %s" % f"{random_file_name}") - return await template_to_pic( - template_name=template_name, - template_path=template_path, - templates=templates, - wait=wait, - - ### - pages=pages, - device_scale_factor=scale_factor - ### - ) - - +import os +import aiofiles # type: ignore +import nonebot +from nonebot import require + +# require("nonebot_plugin_htmlrender") + +from nonebot_plugin_htmlrender import ( # type: ignore + template_to_html, + template_to_pic, + md_to_pic +) # type: ignore + + +async def template2html( + template: str, + templates: dict, +) -> str: + """ + Args: + template: str: 模板文件 + **templates: dict: 模板参数 + Returns: + HTML 正文 + """ + template_path = os.path.dirname(template) + template_name = os.path.basename(template) + return await template_to_html(template_path, template_name, **templates) + + +async def template2image( + template: str, + templates: dict, + pages=None, + wait: int = 0, + scale_factor: float = 1, + debug: bool = False, +) -> bytes: + """ + template -> html -> image + Args: + debug: 输入渲染好的 html + wait: 等待时间,单位秒 + pages: 页面参数 + template: str: 模板文件 + templates: dict: 模板参数 + scale_factor: 缩放因子,越高越清晰 + Returns: + 图片二进制数据 + """ + + ### + if pages is None: + pages = { + "viewport": { + "width" : 1080, + "height": 10 + }, + } + + template_path = os.path.dirname(template) + template_name = os.path.basename(template) + + if debug: + # 重载资源 + raw_html = await template_to_html( + template_name=template_name, + template_path=template_path, + **templates, + ) + random_file_name = f"debug.html" + async with aiofiles.open( + 
os.path.join(template_path, random_file_name), "w", encoding="utf-8" + ) as f: + await f.write(raw_html) + nonebot.logger.info("Debug HTML: %s" % f"{random_file_name}") + return await template_to_pic( + template_name=template_name, + template_path=template_path, + templates=templates, + wait=wait, + + ### + pages=pages, + device_scale_factor=scale_factor + ### + ) + + diff --git a/src/utils/message/markdown.py b/src/utils/message/markdown.py index 35605ca6..bed2073d 100644 --- a/src/utils/message/markdown.py +++ b/src/utils/message/markdown.py @@ -1,209 +1,209 @@ -import base64 -from io import BytesIO -from urllib.parse import quote - -import aiohttp -from PIL import Image - -from ..base.config import get_config -from ..base.data import LiteModel -from ..base.ly_typing import T_Bot - - -def escape_md(text: str) -> str: - """ - 转义Markdown特殊字符 - Args: - text: str: 文本 - - Returns: - str: 转义后文本 - """ - spacial_chars = r"\`*_{}[]()#+-.!" - for char in spacial_chars: - text = text.replace(char, "\\\\" + char) - return text.replace("\n", r"\n").replace('"', r'\\\"') - - -def escape_decorator(func): - def wrapper(text: str): - return func(escape_md(text)) - - return wrapper - - -def compile_md(comps: list[str]) -> str: - """ - 合成Markdown文本 - Args: - comps: list[str]: 组件列表 - - Returns: - str: 编译后文本 - """ - return "".join(comps) - - -class MarkdownComponent: - @staticmethod - def heading(text: str, level: int = 1) -> str: - """标题""" - assert 1 <= level <= 6, "标题级别应在 1-6 之间" - return f"{'#' * level} {text}\n" - - @staticmethod - def bold(text: str) -> str: - """粗体""" - return f"**{text}**" - - @staticmethod - def italic(text: str) -> str: - """斜体""" - return f"*{text}*" - - @staticmethod - def strike(text: str) -> str: - """删除线""" - return f"~~{text}~~" - - @staticmethod - def code(text: str) -> str: - """行内代码""" - return f"`{text}`" - - @staticmethod - def code_block(text: str, language: str = "") -> str: - """代码块""" - return f"```{language}\n{text}\n```\n" - - @staticmethod - def quote(text: str) -> str: - """引用""" - return f"> {text}\n\n" - - @staticmethod - def link(text: str, url: str, symbol: bool = True) -> str: - """ - 链接 - - Args: - text: 链接文本 - url: 链接地址 - symbol: 是否显示链接图标, mqqapi请使用False - """ - return f"[{'🔗' if symbol else ''}{text}]({url})" - - @staticmethod - def image(url: str, *, size: tuple[int, int]) -> str: - """ - 图片,本地图片不建议直接使用 - Args: - url: 图片链接 - size: 图片大小 - - Returns: - markdown格式的图片 - """ - return f"![image #{size[0]}px #{size[1]}px]({url})" - - @staticmethod - async def auto_image(image: str | bytes, bot: T_Bot) -> str: - """ - 自动获取图片大小 - Args: - image: 本地图片路径 | 图片url http/file | 图片bytes - bot: bot对象,用于上传图片到图床 - - Returns: - markdown格式的图片 - """ - if isinstance(image, bytes): - # 传入为二进制图片 - image_obj = Image.open(BytesIO(image)) - base64_string = base64.b64encode(image_obj.tobytes()).decode("utf-8") - url = await bot.call_api("upload_image", file=f"base64://{base64_string}") - size = image_obj.size - elif isinstance(image, str): - # 传入链接或本地路径 - if image.startswith("http"): - # 网络请求 - async with aiohttp.ClientSession() as session: - async with session.get(image) as resp: - image_data = await resp.read() - url = image - size = Image.open(BytesIO(image_data)).size - - else: - # 本地路径/file:// - image_obj = Image.open(image.replace("file://", "")) - base64_string = base64.b64encode(image_obj.tobytes()).decode("utf-8") - url = await bot.call_api("upload_image", file=f"base64://{base64_string}") - size = image_obj.size - else: - raise ValueError("图片类型错误") - - return 
MarkdownComponent.image(url, size=size) - - @staticmethod - def table(data: list[list[any]]) -> str: - """ - 表格 - Args: - data: 表格数据,二维列表 - Returns: - markdown格式的表格 - """ - # 表头 - table = "|".join(map(str, data[0])) + "\n" - table += "|".join([":-:" for _ in range(len(data[0]))]) + "\n" - # 表内容 - for row in data[1:]: - table += "|".join(map(str, row)) + "\n" - return table - - @staticmethod - def paragraph(text: str) -> str: - """ - 段落 - Args: - text: 段落内容 - Returns: - markdown格式的段落 - """ - return f"{text}\n" - - -class Mqqapi: - @staticmethod - @escape_decorator - def cmd(text: str, cmd: str, enter: bool = True, reply: bool = False, use_cmd_start: bool = True) -> str: - """ - 生成点击回调文本 - Args: - text: 显示内容 - cmd: 命令 - enter: 是否自动发送 - reply: 是否回复 - use_cmd_start: 是否使用配置的命令前缀 - - Returns: - [text](mqqapi://) markdown格式的可点击回调文本,类似于链接 - """ - - if use_cmd_start: - command_start = get_config("command_start", []) - if command_start: - # 若命令前缀不为空,则使用配置的第一个命令前缀 - cmd = f"{command_start[0]}{cmd}" - return f"[{text}](mqqapi://aio/inlinecmd?command={quote(cmd)}&reply={str(reply).lower()}&enter={str(enter).lower()})" - - -class RenderData(LiteModel): - label: str - visited_label: str - style: int - - -class Button(LiteModel): - id: int - render_data: RenderData +import base64 +from io import BytesIO +from urllib.parse import quote + +import aiohttp +from PIL import Image + +from ..base.config import get_config +from ..base.data import LiteModel +from ..base.ly_typing import T_Bot + + +def escape_md(text: str) -> str: + """ + 转义Markdown特殊字符 + Args: + text: str: 文本 + + Returns: + str: 转义后文本 + """ + spacial_chars = r"\`*_{}[]()#+-.!" + for char in spacial_chars: + text = text.replace(char, "\\\\" + char) + return text.replace("\n", r"\n").replace('"', r'\\\"') + + +def escape_decorator(func): + def wrapper(text: str): + return func(escape_md(text)) + + return wrapper + + +def compile_md(comps: list[str]) -> str: + """ + 合成Markdown文本 + Args: + comps: list[str]: 组件列表 + + Returns: + str: 编译后文本 + """ + return "".join(comps) + + +class MarkdownComponent: + @staticmethod + def heading(text: str, level: int = 1) -> str: + """标题""" + assert 1 <= level <= 6, "标题级别应在 1-6 之间" + return f"{'#' * level} {text}\n" + + @staticmethod + def bold(text: str) -> str: + """粗体""" + return f"**{text}**" + + @staticmethod + def italic(text: str) -> str: + """斜体""" + return f"*{text}*" + + @staticmethod + def strike(text: str) -> str: + """删除线""" + return f"~~{text}~~" + + @staticmethod + def code(text: str) -> str: + """行内代码""" + return f"`{text}`" + + @staticmethod + def code_block(text: str, language: str = "") -> str: + """代码块""" + return f"```{language}\n{text}\n```\n" + + @staticmethod + def quote(text: str) -> str: + """引用""" + return f"> {text}\n\n" + + @staticmethod + def link(text: str, url: str, symbol: bool = True) -> str: + """ + 链接 + + Args: + text: 链接文本 + url: 链接地址 + symbol: 是否显示链接图标, mqqapi请使用False + """ + return f"[{'🔗' if symbol else ''}{text}]({url})" + + @staticmethod + def image(url: str, *, size: tuple[int, int]) -> str: + """ + 图片,本地图片不建议直接使用 + Args: + url: 图片链接 + size: 图片大小 + + Returns: + markdown格式的图片 + """ + return f"![image #{size[0]}px #{size[1]}px]({url})" + + @staticmethod + async def auto_image(image: str | bytes, bot: T_Bot) -> str: + """ + 自动获取图片大小 + Args: + image: 本地图片路径 | 图片url http/file | 图片bytes + bot: bot对象,用于上传图片到图床 + + Returns: + markdown格式的图片 + """ + if isinstance(image, bytes): + # 传入为二进制图片 + image_obj = Image.open(BytesIO(image)) + base64_string = 
base64.b64encode(image_obj.tobytes()).decode("utf-8") + url = await bot.call_api("upload_image", file=f"base64://{base64_string}") + size = image_obj.size + elif isinstance(image, str): + # 传入链接或本地路径 + if image.startswith("http"): + # 网络请求 + async with aiohttp.ClientSession() as session: + async with session.get(image) as resp: + image_data = await resp.read() + url = image + size = Image.open(BytesIO(image_data)).size + + else: + # 本地路径/file:// + image_obj = Image.open(image.replace("file://", "")) + base64_string = base64.b64encode(image_obj.tobytes()).decode("utf-8") + url = await bot.call_api("upload_image", file=f"base64://{base64_string}") + size = image_obj.size + else: + raise ValueError("图片类型错误") + + return MarkdownComponent.image(url, size=size) + + @staticmethod + def table(data: list[list[any]]) -> str: + """ + 表格 + Args: + data: 表格数据,二维列表 + Returns: + markdown格式的表格 + """ + # 表头 + table = "|".join(map(str, data[0])) + "\n" + table += "|".join([":-:" for _ in range(len(data[0]))]) + "\n" + # 表内容 + for row in data[1:]: + table += "|".join(map(str, row)) + "\n" + return table + + @staticmethod + def paragraph(text: str) -> str: + """ + 段落 + Args: + text: 段落内容 + Returns: + markdown格式的段落 + """ + return f"{text}\n" + + +class Mqqapi: + @staticmethod + @escape_decorator + def cmd(text: str, cmd: str, enter: bool = True, reply: bool = False, use_cmd_start: bool = True) -> str: + """ + 生成点击回调文本 + Args: + text: 显示内容 + cmd: 命令 + enter: 是否自动发送 + reply: 是否回复 + use_cmd_start: 是否使用配置的命令前缀 + + Returns: + [text](mqqapi://) markdown格式的可点击回调文本,类似于链接 + """ + + if use_cmd_start: + command_start = get_config("command_start", []) + if command_start: + # 若命令前缀不为空,则使用配置的第一个命令前缀 + cmd = f"{command_start[0]}{cmd}" + return f"[{text}](mqqapi://aio/inlinecmd?command={quote(cmd)}&reply={str(reply).lower()}&enter={str(enter).lower()})" + + +class RenderData(LiteModel): + label: str + visited_label: str + style: int + + +class Button(LiteModel): + id: int + render_data: RenderData diff --git a/src/utils/message/message.py b/src/utils/message/message.py index f177f0a2..dd60e6d3 100644 --- a/src/utils/message/message.py +++ b/src/utils/message/message.py @@ -1,202 +1,202 @@ -import base64 -import io -from typing import Any -from urllib.parse import quote - -import aiofiles -import aiohttp -import nonebot -from PIL import Image -from nonebot.adapters.onebot import v11 - -from .html_tool import md_to_pic -from .. 
import load_from_yaml -from ..base.ly_typing import T_Bot, T_Message, T_MessageEvent - -config = load_from_yaml("config.yml") - - -async def broadcast_to_superusers(message: str | T_Message, markdown: bool = False): - """广播消息给超级用户""" - for bot in nonebot.get_bots().values(): - for user_id in config.get("superusers", []): - if markdown: - await MarkdownMessage.send_md(message, bot, message_type="private", session_id=user_id) - else: - await bot.send_private_msg(user_id=user_id, message=message) - - -class MarkdownMessage: - @staticmethod - async def send_md( - markdown: str, - bot: T_Bot, *, - message_type: str = None, - session_id: str | int = None - ) -> dict[str, Any] | None: - """ - 发送Markdown消息,支持自动转为图片发送 - Args: - markdown: - bot: - message_type: - session_id: - Returns: - - """ - plain_markdown = markdown.replace("[🔗", "[") - md_image_bytes = await md_to_pic( - md=plain_markdown, - width=540, - device_scale_factor=4 - ) - print(md_image_bytes) - data = await bot.send_msg( - message_type=message_type, - group_id=session_id, - user_id=session_id, - message=v11.MessageSegment.image(md_image_bytes), - ) - return data - - @staticmethod - async def send_image( - image: bytes | str, - bot: T_Bot, *, - message_type: str = None, - session_id: str | int = None, - event: T_MessageEvent = None, - **kwargs - ) -> dict: - """ - 发送单张装逼大图 - Args: - image: 图片字节流或图片本地路径,链接请使用Markdown.image_async方法获取后通过send_md发送 - bot: bot instance - message_type: message message_type - session_id: session id - event: event - kwargs: other arguments - Returns: - dict: response data - """ - if isinstance(image, str): - async with aiofiles.open(image, "rb") as f: - image = await f.read() - method = 2 - if method == 2: - base64_string = base64.b64encode(image).decode("utf-8") - data = await bot.call_api("upload_image", file=f"base64://{base64_string}") - await MarkdownMessage.send_md(MarkdownMessage.image(data, Image.open(io.BytesIO(image)).size), bot, - message_type=message_type, - session_id=session_id) - - # 其他实现端方案 - else: - image_message_id = (await bot.send_private_msg( - user_id=bot.self_id, - message=[ - v11.MessageSegment.image(file=image) - ] - ))["message_id"] - image_url = (await bot.get_msg(message_id=image_message_id))["message"][0]["data"]["url"] - image_size = Image.open(io.BytesIO(image)).size - image_md = MarkdownMessage.image(image_url, image_size) - return await MarkdownMessage.send_md(image_md, bot, message_type=message_type, session_id=session_id) - - if data is None: - data = await bot.send_msg( - message_type=message_type, - group_id=session_id, - user_id=session_id, - message=v11.MessageSegment.image(image), - **kwargs - ) - return data - - @staticmethod - async def get_image_url(image: bytes | str, bot: T_Bot) -> str: - """把图片上传到图床,返回链接 - Args: - bot: 发送的bot - image: 图片字节流或图片本地路径 - Returns: - """ - # 等林文轩修好Lagrange.OneBot再说 - - @staticmethod - def btn_cmd(name: str, cmd: str, reply: bool = False, enter: bool = True) -> str: - """生成点击回调按钮 - Args: - name: 按钮显示内容 - cmd: 发送的命令,已在函数内url编码,不需要再次编码 - reply: 是否以回复的方式发送消息 - enter: 自动发送消息则为True,否则填充到输入框 - - Returns: - markdown格式的可点击回调按钮 - - """ - if "" not in config.get("command_start", ["/"]) and config.get("alconna_use_command_start", False): - cmd = f"{config['command_start'][0]}{cmd}" - return f"[{name}](mqqapi://aio/inlinecmd?command={quote(cmd)}&reply={str(reply).lower()}&enter={str(enter).lower()})" - - @staticmethod - def btn_link(name: str, url: str) -> str: - """生成点击链接按钮 - Args: - name: 链接显示内容 - url: 链接地址 - - Returns: - markdown格式的链接 - - """ - 
return f"[🔗{name}]({url})" - - @staticmethod - def image(url: str, size: tuple[int, int]) -> str: - """构建图片链接 - Args: - size: - url: 图片链接 - - Returns: - markdown格式的图片 - - """ - return f"![image #{size[0]}px #{size[1]}px]({url})" - - @staticmethod - async def image_async(url: str) -> str: - """获取图片,自动请求获取大小,异步 - Args: - url: 图片链接 - - Returns: - 图片Markdown语法: ![image #{width}px #{height}px](link) - - """ - try: - async with aiohttp.ClientSession() as session: - async with session.get(url) as resp: - image = Image.open(io.BytesIO(await resp.read())) - return MarkdownMessage.image(url, image.size) - except Exception as e: - nonebot.logger.error(f"get image error: {e}") - return "[Image Error]" - - @staticmethod - def escape(text: str) -> str: - """转义特殊字符 - Args: - text: 需要转义的文本,请勿直接把整个markdown文本传入,否则会转义掉所有字符 - - Returns: - 转义后的文本 - - """ - chars = "*[]()~_`>#+=|{}.!" - for char in chars: - text = text.replace(char, f"\\\\{char}") - return text +import base64 +import io +from typing import Any +from urllib.parse import quote + +import aiofiles +import aiohttp +import nonebot +from PIL import Image +from nonebot.adapters.onebot import v11 + +from .html_tool import md_to_pic +from .. import load_from_yaml +from ..base.ly_typing import T_Bot, T_Message, T_MessageEvent + +config = load_from_yaml("config.yml") + + +async def broadcast_to_superusers(message: str | T_Message, markdown: bool = False): + """广播消息给超级用户""" + for bot in nonebot.get_bots().values(): + for user_id in config.get("superusers", []): + if markdown: + await MarkdownMessage.send_md(message, bot, message_type="private", session_id=user_id) + else: + await bot.send_private_msg(user_id=user_id, message=message) + + +class MarkdownMessage: + @staticmethod + async def send_md( + markdown: str, + bot: T_Bot, *, + message_type: str = None, + session_id: str | int = None + ) -> dict[str, Any] | None: + """ + 发送Markdown消息,支持自动转为图片发送 + Args: + markdown: + bot: + message_type: + session_id: + Returns: + + """ + plain_markdown = markdown.replace("[🔗", "[") + md_image_bytes = await md_to_pic( + md=plain_markdown, + width=540, + device_scale_factor=4 + ) + print(md_image_bytes) + data = await bot.send_msg( + message_type=message_type, + group_id=session_id, + user_id=session_id, + message=v11.MessageSegment.image(md_image_bytes), + ) + return data + + @staticmethod + async def send_image( + image: bytes | str, + bot: T_Bot, *, + message_type: str = None, + session_id: str | int = None, + event: T_MessageEvent = None, + **kwargs + ) -> dict: + """ + 发送单张装逼大图 + Args: + image: 图片字节流或图片本地路径,链接请使用Markdown.image_async方法获取后通过send_md发送 + bot: bot instance + message_type: message message_type + session_id: session id + event: event + kwargs: other arguments + Returns: + dict: response data + """ + if isinstance(image, str): + async with aiofiles.open(image, "rb") as f: + image = await f.read() + method = 2 + if method == 2: + base64_string = base64.b64encode(image).decode("utf-8") + data = await bot.call_api("upload_image", file=f"base64://{base64_string}") + await MarkdownMessage.send_md(MarkdownMessage.image(data, Image.open(io.BytesIO(image)).size), bot, + message_type=message_type, + session_id=session_id) + + # 其他实现端方案 + else: + image_message_id = (await bot.send_private_msg( + user_id=bot.self_id, + message=[ + v11.MessageSegment.image(file=image) + ] + ))["message_id"] + image_url = (await bot.get_msg(message_id=image_message_id))["message"][0]["data"]["url"] + image_size = Image.open(io.BytesIO(image)).size + image_md = 
MarkdownMessage.image(image_url, image_size) + return await MarkdownMessage.send_md(image_md, bot, message_type=message_type, session_id=session_id) + + if data is None: + data = await bot.send_msg( + message_type=message_type, + group_id=session_id, + user_id=session_id, + message=v11.MessageSegment.image(image), + **kwargs + ) + return data + + @staticmethod + async def get_image_url(image: bytes | str, bot: T_Bot) -> str: + """把图片上传到图床,返回链接 + Args: + bot: 发送的bot + image: 图片字节流或图片本地路径 + Returns: + """ + # 等林文轩修好Lagrange.OneBot再说 + + @staticmethod + def btn_cmd(name: str, cmd: str, reply: bool = False, enter: bool = True) -> str: + """生成点击回调按钮 + Args: + name: 按钮显示内容 + cmd: 发送的命令,已在函数内url编码,不需要再次编码 + reply: 是否以回复的方式发送消息 + enter: 自动发送消息则为True,否则填充到输入框 + + Returns: + markdown格式的可点击回调按钮 + + """ + if "" not in config.get("command_start", ["/"]) and config.get("alconna_use_command_start", False): + cmd = f"{config['command_start'][0]}{cmd}" + return f"[{name}](mqqapi://aio/inlinecmd?command={quote(cmd)}&reply={str(reply).lower()}&enter={str(enter).lower()})" + + @staticmethod + def btn_link(name: str, url: str) -> str: + """生成点击链接按钮 + Args: + name: 链接显示内容 + url: 链接地址 + + Returns: + markdown格式的链接 + + """ + return f"[🔗{name}]({url})" + + @staticmethod + def image(url: str, size: tuple[int, int]) -> str: + """构建图片链接 + Args: + size: + url: 图片链接 + + Returns: + markdown格式的图片 + + """ + return f"![image #{size[0]}px #{size[1]}px]({url})" + + @staticmethod + async def image_async(url: str) -> str: + """获取图片,自动请求获取大小,异步 + Args: + url: 图片链接 + + Returns: + 图片Markdown语法: ![image #{width}px #{height}px](link) + + """ + try: + async with aiohttp.ClientSession() as session: + async with session.get(url) as resp: + image = Image.open(io.BytesIO(await resp.read())) + return MarkdownMessage.image(url, image.size) + except Exception as e: + nonebot.logger.error(f"get image error: {e}") + return "[Image Error]" + + @staticmethod + def escape(text: str) -> str: + """转义特殊字符 + Args: + text: 需要转义的文本,请勿直接把整个markdown文本传入,否则会转义掉所有字符 + + Returns: + 转义后的文本 + + """ + chars = "*[]()~_`>#+=|{}.!" 
+ for char in chars: + text = text.replace(char, f"\\\\{char}") + return text diff --git a/src/utils/message/string_tool.py b/src/utils/message/string_tool.py index 6193bd5f..32cf9a0a 100644 --- a/src/utils/message/string_tool.py +++ b/src/utils/message/string_tool.py @@ -1,101 +1,101 @@ -import nonebot - - -def convert_duration(text: str, default) -> float: - """ - 转换自然语言时间为秒数 - Args: - text: 1d2h3m - default: 出错时返回 - - Returns: - float: 总秒数 - """ - units = { - "d" : 86400, - "h" : 3600, - "m" : 60, - "s" : 1, - "ms": 0.001 - } - - duration = 0 - current_number = '' - current_unit = '' - try: - for char in text: - if char.isdigit(): - current_number += char - else: - if current_number: - duration += int(current_number) * units[current_unit] - current_number = '' - if char in units: - current_unit = char - else: - current_unit = '' - - if current_number: - duration += int(current_number) * units[current_unit] - - return duration - - except BaseException as e: - nonebot.logger.info(f"convert_duration error: {e}") - return default - - -def convert_time_to_seconds(time_str): - """转换自然语言时长为秒数 - Args: - time_str: 1d2m3s - - Returns: - - """ - seconds = 0 - current_number = '' - - for char in time_str: - if char.isdigit() or char == '.': - current_number += char - elif char == 'd': - seconds += float(current_number) * 24 * 60 * 60 - current_number = '' - elif char == 'h': - seconds += float(current_number) * 60 * 60 - current_number = '' - elif char == 'm': - seconds += float(current_number) * 60 - current_number = '' - elif char == 's': - seconds += float(current_number) - current_number = '' - - return int(seconds) - - -def convert_seconds_to_time(seconds): - """转换秒数为自然语言时长 - Args: - seconds: 10000 - - Returns: - - """ - d = seconds // (24 * 60 * 60) - h = (seconds % (24 * 60 * 60)) // (60 * 60) - m = (seconds % (60 * 60)) // 60 - s = seconds % 60 - - # 若值为0则不显示 - time_str = '' - if d: - time_str += f"{d}d" - if h: - time_str += f"{h}h" - if m: - time_str += f"{m}m" - if not time_str: - time_str = f"{s}s" - return time_str +import nonebot + + +def convert_duration(text: str, default) -> float: + """ + 转换自然语言时间为秒数 + Args: + text: 1d2h3m + default: 出错时返回 + + Returns: + float: 总秒数 + """ + units = { + "d" : 86400, + "h" : 3600, + "m" : 60, + "s" : 1, + "ms": 0.001 + } + + duration = 0 + current_number = '' + current_unit = '' + try: + for char in text: + if char.isdigit(): + current_number += char + else: + if current_number: + duration += int(current_number) * units[current_unit] + current_number = '' + if char in units: + current_unit = char + else: + current_unit = '' + + if current_number: + duration += int(current_number) * units[current_unit] + + return duration + + except BaseException as e: + nonebot.logger.info(f"convert_duration error: {e}") + return default + + +def convert_time_to_seconds(time_str): + """转换自然语言时长为秒数 + Args: + time_str: 1d2m3s + + Returns: + + """ + seconds = 0 + current_number = '' + + for char in time_str: + if char.isdigit() or char == '.': + current_number += char + elif char == 'd': + seconds += float(current_number) * 24 * 60 * 60 + current_number = '' + elif char == 'h': + seconds += float(current_number) * 60 * 60 + current_number = '' + elif char == 'm': + seconds += float(current_number) * 60 + current_number = '' + elif char == 's': + seconds += float(current_number) + current_number = '' + + return int(seconds) + + +def convert_seconds_to_time(seconds): + """转换秒数为自然语言时长 + Args: + seconds: 10000 + + Returns: + + """ + d = seconds // (24 * 60 * 60) + h = 
(seconds % (24 * 60 * 60)) // (60 * 60) + m = (seconds % (60 * 60)) // 60 + s = seconds % 60 + + # 若值为0则不显示 + time_str = '' + if d: + time_str += f"{d}d" + if h: + time_str += f"{h}h" + if m: + time_str += f"{m}m" + if not time_str: + time_str = f"{s}s" + return time_str diff --git a/src/utils/message/tools.py b/src/utils/message/tools.py index 7d051139..e458ee26 100644 --- a/src/utils/message/tools.py +++ b/src/utils/message/tools.py @@ -1,99 +1,99 @@ -import random -from importlib.metadata import PackageNotFoundError, version - - -def clamp(value: float, min_value: float, max_value: float) -> float | int: - """将值限制在最小值和最大值之间 - - Args: - value (float): 要限制的值 - min_value (float): 最小值 - max_value (float): 最大值 - - Returns: - float: 限制后的值 - """ - return max(min(value, max_value), min_value) - - -def convert_size(size: int, precision: int = 2, add_unit: bool = True, suffix: str = " XiB") -> str | float: - """把字节数转换为人类可读的字符串,计算正负 - - Args: - - add_unit: 是否添加单位,False后则suffix无效 - suffix: XiB或XB - precision: 浮点数的小数点位数 - size (int): 字节数 - - Returns: - - str: The human-readable string, e.g. "1.23 GB". - """ - is_negative = size < 0 - size = abs(size) - for unit in ("", "K", "M", "G", "T", "P", "E", "Z"): - if size < 1024: - break - size /= 1024 - if is_negative: - size = -size - if add_unit: - return f"{size:.{precision}f}{suffix.replace('X', unit)}" - else: - return size - - -def keywords_in_text(keywords: list[str], text: str, all_matched: bool) -> bool: - """ - 检查关键词是否在文本中 - Args: - keywords: 关键词列表 - text: 文本 - all_matched: 是否需要全部匹配 - - Returns: - - """ - if all_matched: - for keyword in keywords: - if keyword not in text: - return False - return True - else: - for keyword in keywords: - if keyword in text: - return True - return False - - -def check_for_package(package_name: str) -> bool: - try: - version(package_name) - return True - except PackageNotFoundError: - return False - - -def random_ascii_string(length: int) -> str: - """ - 生成随机ASCII字符串 - Args: - length: - - Returns: - - """ - return "".join([chr(random.randint(33, 126)) for _ in range(length)]) - - -def random_hex_string(length: int) -> str: - """ - 生成随机十六进制字符串 - Args: - length: - - Returns: - - """ - return "".join([random.choice("0123456789abcdef") for _ in range(length)]) +import random +from importlib.metadata import PackageNotFoundError, version + + +def clamp(value: float, min_value: float, max_value: float) -> float | int: + """将值限制在最小值和最大值之间 + + Args: + value (float): 要限制的值 + min_value (float): 最小值 + max_value (float): 最大值 + + Returns: + float: 限制后的值 + """ + return max(min(value, max_value), min_value) + + +def convert_size(size: int, precision: int = 2, add_unit: bool = True, suffix: str = " XiB") -> str | float: + """把字节数转换为人类可读的字符串,计算正负 + + Args: + + add_unit: 是否添加单位,False后则suffix无效 + suffix: XiB或XB + precision: 浮点数的小数点位数 + size (int): 字节数 + + Returns: + + str: The human-readable string, e.g. "1.23 GB". 
+ """ + is_negative = size < 0 + size = abs(size) + for unit in ("", "K", "M", "G", "T", "P", "E", "Z"): + if size < 1024: + break + size /= 1024 + if is_negative: + size = -size + if add_unit: + return f"{size:.{precision}f}{suffix.replace('X', unit)}" + else: + return size + + +def keywords_in_text(keywords: list[str], text: str, all_matched: bool) -> bool: + """ + 检查关键词是否在文本中 + Args: + keywords: 关键词列表 + text: 文本 + all_matched: 是否需要全部匹配 + + Returns: + + """ + if all_matched: + for keyword in keywords: + if keyword not in text: + return False + return True + else: + for keyword in keywords: + if keyword in text: + return True + return False + + +def check_for_package(package_name: str) -> bool: + try: + version(package_name) + return True + except PackageNotFoundError: + return False + + +def random_ascii_string(length: int) -> str: + """ + 生成随机ASCII字符串 + Args: + length: + + Returns: + + """ + return "".join([chr(random.randint(33, 126)) for _ in range(length)]) + + +def random_hex_string(length: int) -> str: + """ + 生成随机十六进制字符串 + Args: + length: + + Returns: + + """ + return "".join([random.choice("0123456789abcdef") for _ in range(length)]) diff --git a/src/utils/satori_utils/__init__.py b/src/utils/satori_utils/__init__.py index 33e7964f..192714bb 100644 --- a/src/utils/satori_utils/__init__.py +++ b/src/utils/satori_utils/__init__.py @@ -1,3 +1,3 @@ -from .user_info import user_infos -from .count_friends import count_friends -from .count_groups import count_groups +from .user_info import user_infos +from .count_friends import count_friends +from .count_groups import count_groups diff --git a/src/utils/satori_utils/count_friends.py b/src/utils/satori_utils/count_friends.py index 2752aac4..3f7b18ea 100644 --- a/src/utils/satori_utils/count_friends.py +++ b/src/utils/satori_utils/count_friends.py @@ -1,13 +1,13 @@ -from nonebot.adapters import satori - - -async def count_friends(bot: satori.Bot) -> int: - cnt: int = 0 - - friend_response = await bot.friend_list() - while friend_response.next is not None: - cnt += len(friend_response.data) - friend_response = await bot.friend_list(next_token=friend_response.next) - - cnt += len(friend_response.data) - return cnt - 1 +from nonebot.adapters import satori + + +async def count_friends(bot: satori.Bot) -> int: + cnt: int = 0 + + friend_response = await bot.friend_list() + while friend_response.next is not None: + cnt += len(friend_response.data) + friend_response = await bot.friend_list(next_token=friend_response.next) + + cnt += len(friend_response.data) + return cnt - 1 diff --git a/src/utils/satori_utils/count_groups.py b/src/utils/satori_utils/count_groups.py index 4cd30bd1..789a40b7 100644 --- a/src/utils/satori_utils/count_groups.py +++ b/src/utils/satori_utils/count_groups.py @@ -1,13 +1,13 @@ -from nonebot.adapters import satori - - -async def count_groups(bot: satori.Bot) -> int: - cnt: int = 0 - - group_response = await bot.guild_list() - while group_response.next is not None: - cnt += len(group_response.data) - group_response = await bot.friend_list(next_token=group_response.next) - - cnt += len(group_response.data) - return cnt - 1 +from nonebot.adapters import satori + + +async def count_groups(bot: satori.Bot) -> int: + cnt: int = 0 + + group_response = await bot.guild_list() + while group_response.next is not None: + cnt += len(group_response.data) + group_response = await bot.friend_list(next_token=group_response.next) + + cnt += len(group_response.data) + return cnt - 1 diff --git a/src/utils/satori_utils/user_info.py 
b/src/utils/satori_utils/user_info.py index 45184297..6500e719 100644 --- a/src/utils/satori_utils/user_info.py +++ b/src/utils/satori_utils/user_info.py @@ -1,64 +1,64 @@ -import nonebot - -from nonebot.adapters import satori -from nonebot.adapters.satori.models import User - - -class UserInfo: - user_infos: dict = {} - - async def load_friends(self, bot: satori.Bot): - nonebot.logger.info("Update user info from friends") - friend_response = await bot.friend_list() - while friend_response.next is not None: - for i in friend_response.data: - i: User = i - self.user_infos[str(i.id)] = i - friend_response = await bot.friend_list(next_token=friend_response.next) - - for i in friend_response.data: - i: User = i - self.user_infos[str(i.id)] = i - - nonebot.logger.info("Finish update user info") - - async def get(self, uid: int | str) -> User | None: - try: - return self.user_infos[str(uid)] - except KeyError: - return None - - async def put(self, user: User) -> bool: - """ - 向用户信息数据库中添加/修改一项,返回值仅代表数据是否变更,不代表操作是否成功 - Args: - user: 要加入数据库的用户 - - Returns: 当数据库中用户信息发生变化时返回 True, 否则返回 False - - """ - try: - old_user: User = self.user_infos[str(user.id)] - attr_edited = False - if user.name is not None: - if old_user.name != user.name: - attr_edited = True - self.user_infos[str(user.id)].name = user.name - if user.nick is not None: - if old_user.nick != user.nick: - attr_edited = True - self.user_infos[str(user.id)].nick = user.nick - if user.avatar is not None: - if old_user.avatar != user.avatar: - attr_edited = True - self.user_infos[str(user.id)].avatar = user.avatar - return attr_edited - except KeyError: - self.user_infos[str(user.id)] = user - return True - - def __init__(self): - pass - - -user_infos = UserInfo() +import nonebot + +from nonebot.adapters import satori +from nonebot.adapters.satori.models import User + + +class UserInfo: + user_infos: dict = {} + + async def load_friends(self, bot: satori.Bot): + nonebot.logger.info("Update user info from friends") + friend_response = await bot.friend_list() + while friend_response.next is not None: + for i in friend_response.data: + i: User = i + self.user_infos[str(i.id)] = i + friend_response = await bot.friend_list(next_token=friend_response.next) + + for i in friend_response.data: + i: User = i + self.user_infos[str(i.id)] = i + + nonebot.logger.info("Finish update user info") + + async def get(self, uid: int | str) -> User | None: + try: + return self.user_infos[str(uid)] + except KeyError: + return None + + async def put(self, user: User) -> bool: + """ + 向用户信息数据库中添加/修改一项,返回值仅代表数据是否变更,不代表操作是否成功 + Args: + user: 要加入数据库的用户 + + Returns: 当数据库中用户信息发生变化时返回 True, 否则返回 False + + """ + try: + old_user: User = self.user_infos[str(user.id)] + attr_edited = False + if user.name is not None: + if old_user.name != user.name: + attr_edited = True + self.user_infos[str(user.id)].name = user.name + if user.nick is not None: + if old_user.nick != user.nick: + attr_edited = True + self.user_infos[str(user.id)].nick = user.nick + if user.avatar is not None: + if old_user.avatar != user.avatar: + attr_edited = True + self.user_infos[str(user.id)].avatar = user.avatar + return attr_edited + except KeyError: + self.user_infos[str(user.id)] = user + return True + + def __init__(self): + pass + + +user_infos = UserInfo() diff --git a/tests/test_ipc.py b/tests/test_ipc.py new file mode 100644 index 00000000..ed039542 --- /dev/null +++ b/tests/test_ipc.py @@ -0,0 +1,22 @@ +from liteyuki.comm import Channel as Chan +from multiprocessing import Process + + +def 
p1(chan: Chan):
+    for i in range(10):
+        chan.send(i)
+
+
+def p2(chan: Chan):
+    for _ in range(10):  # consume exactly as many messages as p1 sends so the test terminates
+        print(chan.recv())
+
+
+def test_ipc():
+    chan = Chan("Name")
+    p1_proc = Process(target=p1, args=(chan,))
+    p2_proc = Process(target=p2, args=(chan,))
+    p1_proc.start()
+    p2_proc.start()
+    p1_proc.join()
+    p2_proc.join()
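
Note: below is a minimal sketch of how the same Channel API exercised by the new test might be used outside of pytest, with a sentinel value so the consumer exits cleanly. Only the named constructor, send() and recv() appear in the diff above; the sentinel convention, the queue-like cross-process semantics and the process handling are illustrative assumptions, not documented behaviour of liteyuki.comm.

from multiprocessing import Process

from liteyuki.comm import Channel as Chan

STOP = None  # sentinel: an assumed convention for shutdown, not a liteyuki.comm feature


def producer(chan: Chan):
    # send a few payloads, then the sentinel to tell the consumer to stop
    for i in range(5):
        chan.send(i)
    chan.send(STOP)


def consumer(chan: Chan):
    # keep receiving until the sentinel arrives
    while True:
        item = chan.recv()
        if item is STOP:
            break
        print("recv:", item)


if __name__ == "__main__":
    chan = Chan("demo-chan")  # named channel, as in the test above
    procs = [
        Process(target=producer, args=(chan,)),
        Process(target=consumer, args=(chan,)),
    ]
    for p in procs:
        p.start()
    for p in procs:
        p.join()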