Mirror of https://github.com/LiteyukiStudio/nonebot-plugin-marshoai.git
parent 8aff490aeb
commit 6f085b36c6

@@ -1,6 +1,6 @@
 <!--suppress LongLine -->
 <div align="center">
-<a href="https://marsho.liteyuki.icu"><img src="https://marsho.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
+<a href="https://marshoai-docs.meli.liteyuki.icu"><img src="https://marshoai-docs.meli.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
 <br>
 </div>

@@ -48,7 +48,7 @@ _谁不喜欢回复消息快又可爱的猫娘呢?_

 ## 😼 使用

-请查看[使用文档](https://marsho.liteyuki.icu/start/use)
+请查看[使用文档](https://marshoai-docs.meli.liteyuki.icu/start/use)

 ## ❤ 鸣谢&版权说明

@@ -1,6 +1,6 @@
 <!--suppress LongLine -->
 <div align="center">
-<a href="https://marsho.liteyuki.icu"><img src="https://marsho.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
+<a href="https://marshoai-docs.meli.liteyuki.icu"><img src="https://marshoai-docs.meli.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
 <br>
 </div>

@@ -48,7 +48,7 @@ Plugin internally installed the catgirl character of Marsho, is able to have a c
 - 🐾 Play! I like play with friends!

 ## 😼 Usage
-Please read [Documentation](https://marsho.liteyuki.icu/start/install)
+Please read [Documentation](https://marshoai-docs.meli.liteyuki.icu/start/install)

 ## ❤ Thanks&Copyright
 This project uses the following code from other projects:
@@ -65,7 +65,7 @@ When nonebot linked to OneBot v11 adapter, can recieve double click and response
 MarshoTools is a feature added in `v0.5.0`, support loading external function library to provide Function Call for Marsho.

 ## 🧩 Marsho Plugin
-Marsho Plugin is a feature added in `v1.0.0`, replacing the old MarshoTools feature. [Documentation](https://marsho.liteyuki.icu/dev/extension)
+Marsho Plugin is a feature added in `v1.0.0`, replacing the old MarshoTools feature. [Documentation](https://marshoai-docs.meli.liteyuki.icu/dev/extension)

 ## 👍 Praise list

@@ -147,4 +147,5 @@ Add options in the `.env` file from the diagram below in nonebot2 project.
 | MARSHOAI_ENABLE_RICHTEXT_PARSE | `bool` | `true` | Turn on auto parse rich text feature(including image, LaTeX equation) |
 | MARSHOAI_SINGLE_LATEX_PARSE | `bool` | `false`| Render single-line equation or not |
 | MARSHOAI_FIX_TOOLCALLS | `bool` | `true` | Fix tool calls or not |
 | MARSHOAI_SEND_THINKING | `bool` | `true` | Send thinking chain or not |
+| MARSHOAI_STREAM | `bool` | `false`| 是否通过流式方式请求 API **开启此项后暂无法使用函数调用,无法在 Bot 用户侧聊天界面呈现出流式效果** |
@@ -68,7 +68,7 @@ GitHub Models API 的限制较多,不建议使用,建议通过修改`MARSHOA

 ## 🧩 小棉插件

-小棉插件是`v1.0.0`的新增功能,替代旧的小棉工具功能。[使用文档](https://marsho.liteyuki.icu/dev/extension)
+小棉插件是`v1.0.0`的新增功能,替代旧的小棉工具功能。[使用文档](https://marshoai-docs.meli.liteyuki.icu/dev/extension)

 ## 👍 夸赞名单

@@ -149,6 +149,8 @@ GitHub Models API 的限制较多,不建议使用,建议通过修改`MARSHOA
 | MARSHOAI_SINGLE_LATEX_PARSE | `bool` | `false` | 单行公式是否渲染(当消息富文本解析启用时可用)(如果单行也渲……只能说不好看) |
 | MARSHOAI_FIX_TOOLCALLS | `bool` | `true` | 是否修复工具调用(部分模型须关闭,使用 vLLM 部署的模型时须关闭) |
 | MARSHOAI_SEND_THINKING | `bool` | `true` | 是否发送思维链(部分模型不支持) |
+| MARSHOAI_STREAM | `bool` | `false`| 是否通过流式方式请求 API **开启此项后暂无法使用函数调用,无法在 Bot 用户侧聊天界面呈现出流式效果** |
+

 #### 开发及调试选项

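
The README hunks above document the new `MARSHOAI_STREAM` switch. As a minimal sketch of how it would be turned on in a NoneBot2 project's `.env` (the other keys are just the defaults already listed in the tables, repeated here only for illustration), keeping in mind the table's caveat that function calls are not yet available in streaming mode:

```dotenv
# Request the API in streaming mode; function calls are not yet supported when enabled
MARSHOAI_STREAM=true
# Defaults from the tables above, shown only for context
MARSHOAI_SEND_THINKING=true
MARSHOAI_FIX_TOOLCALLS=true
```
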
|
@ -32,6 +32,7 @@ class ConfigModel(BaseModel):
|
|||||||
marshoai_enable_sysasuser_prompt: bool = False
|
marshoai_enable_sysasuser_prompt: bool = False
|
||||||
marshoai_additional_prompt: str = ""
|
marshoai_additional_prompt: str = ""
|
||||||
marshoai_poke_suffix: str = "揉了揉你的猫耳"
|
marshoai_poke_suffix: str = "揉了揉你的猫耳"
|
||||||
|
marshoai_stream: bool = False
|
||||||
marshoai_enable_richtext_parse: bool = True
|
marshoai_enable_richtext_parse: bool = True
|
||||||
"""
|
"""
|
||||||
是否启用自动消息富文本解析 即若包含图片链接则发送图片、若包含LaTeX公式则发送公式图。
|
是否启用自动消息富文本解析 即若包含图片链接则发送图片、若包含LaTeX公式则发送公式图。
|
||||||
|
@ -37,7 +37,7 @@ OPENAI_NEW_MODELS: list = [
|
|||||||
INTRODUCTION: str = f"""MarshoAI-NoneBot by LiteyukiStudio
|
INTRODUCTION: str = f"""MarshoAI-NoneBot by LiteyukiStudio
|
||||||
你好喵~我是一只可爱的猫娘AI,名叫小棉~🐾!
|
你好喵~我是一只可爱的猫娘AI,名叫小棉~🐾!
|
||||||
我的主页在这里哦~↓↓↓
|
我的主页在这里哦~↓↓↓
|
||||||
https://marsho.liteyuki.icu
|
https://marshoai-docs.meli.liteyuki.icu
|
||||||
|
|
||||||
※ 使用 「{config.marshoai_default_name}.status」命令获取状态信息。
|
※ 使用 「{config.marshoai_default_name}.status」命令获取状态信息。
|
||||||
※ 使用「{config.marshoai_default_name}.help」命令获取使用说明。"""
|
※ 使用「{config.marshoai_default_name}.help」命令获取使用说明。"""
|
||||||
|
@ -18,8 +18,9 @@ from nonebot.matcher import (
|
|||||||
current_matcher,
|
current_matcher,
|
||||||
)
|
)
|
||||||
from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
|
from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
|
||||||
from openai import AsyncOpenAI
|
from openai import AsyncOpenAI, AsyncStream
|
||||||
from openai.types.chat import ChatCompletion, ChatCompletionMessage
|
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
|
||||||
|
from openai.types.chat.chat_completion import Choice
|
||||||
|
|
||||||
from .config import config
|
from .config import config
|
||||||
from .constants import SUPPORT_IMAGE_MODELS
|
from .constants import SUPPORT_IMAGE_MODELS
|
||||||
@ -94,9 +95,10 @@ class MarshoHandler:
|
|||||||
self,
|
self,
|
||||||
user_message: Union[str, list],
|
user_message: Union[str, list],
|
||||||
model_name: str,
|
model_name: str,
|
||||||
tools_list: list,
|
tools_list: list | None,
|
||||||
tool_message: Optional[list] = None,
|
tool_message: Optional[list] = None,
|
||||||
) -> ChatCompletion:
|
stream: bool = False,
|
||||||
|
) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
|
||||||
"""
|
"""
|
||||||
处理单条聊天
|
处理单条聊天
|
||||||
"""
|
"""
|
||||||
@ -109,12 +111,13 @@ class MarshoHandler:
|
|||||||
msg=context_msg + [UserMessage(content=user_message).as_dict()] + (tool_message if tool_message else []), # type: ignore
|
msg=context_msg + [UserMessage(content=user_message).as_dict()] + (tool_message if tool_message else []), # type: ignore
|
||||||
model_name=model_name,
|
model_name=model_name,
|
||||||
tools=tools_list if tools_list else None,
|
tools=tools_list if tools_list else None,
|
||||||
|
stream=stream,
|
||||||
)
|
)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
async def handle_function_call(
|
async def handle_function_call(
|
||||||
self,
|
self,
|
||||||
completion: ChatCompletion,
|
completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
|
||||||
user_message: Union[str, list],
|
user_message: Union[str, list],
|
||||||
model_name: str,
|
model_name: str,
|
||||||
tools_list: list,
|
tools_list: list,
|
||||||
@ -122,7 +125,10 @@ class MarshoHandler:
|
|||||||
# function call
|
# function call
|
||||||
# 需要获取额外信息,调用函数工具
|
# 需要获取额外信息,调用函数工具
|
||||||
tool_msg = []
|
tool_msg = []
|
||||||
choice = completion.choices[0]
|
if isinstance(completion, ChatCompletion):
|
||||||
|
choice = completion.choices[0]
|
||||||
|
else:
|
||||||
|
raise ValueError("Unexpected completion type")
|
||||||
# await UniMessage(str(response)).send()
|
# await UniMessage(str(response)).send()
|
||||||
tool_calls = choice.message.tool_calls
|
tool_calls = choice.message.tool_calls
|
||||||
# try:
|
# try:
|
||||||
@ -191,14 +197,23 @@ class MarshoHandler:
|
|||||||
"""
|
"""
|
||||||
global target_list
|
global target_list
|
||||||
if stream:
|
if stream:
|
||||||
raise NotImplementedError
|
response = await self.handle_stream_request(
|
||||||
response = await self.handle_single_chat(
|
user_message=user_message,
|
||||||
user_message=user_message,
|
model_name=model_name,
|
||||||
model_name=model_name,
|
tools_list=tools_list,
|
||||||
tools_list=tools_list,
|
tools_message=tool_message,
|
||||||
tool_message=tool_message,
|
)
|
||||||
)
|
else:
|
||||||
choice = response.choices[0]
|
response = await self.handle_single_chat( # type: ignore
|
||||||
|
user_message=user_message,
|
||||||
|
model_name=model_name,
|
||||||
|
tools_list=tools_list,
|
||||||
|
tool_message=tool_message,
|
||||||
|
)
|
||||||
|
if isinstance(response, ChatCompletion):
|
||||||
|
choice = response.choices[0]
|
||||||
|
else:
|
||||||
|
raise ValueError("Unexpected response type")
|
||||||
# Sprint(choice)
|
# Sprint(choice)
|
||||||
# 当tool_calls非空时,将finish_reason设置为TOOL_CALLS
|
# 当tool_calls非空时,将finish_reason设置为TOOL_CALLS
|
||||||
if choice.message.tool_calls is not None and config.marshoai_fix_toolcalls:
|
if choice.message.tool_calls is not None and config.marshoai_fix_toolcalls:
|
||||||
@ -240,3 +255,74 @@ class MarshoHandler:
|
|||||||
else:
|
else:
|
||||||
await UniMessage(f"意外的完成原因:{choice.finish_reason}").send()
|
await UniMessage(f"意外的完成原因:{choice.finish_reason}").send()
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
async def handle_stream_request(
|
||||||
|
self,
|
||||||
|
user_message: Union[str, list],
|
||||||
|
model_name: str,
|
||||||
|
tools_list: list,
|
||||||
|
tools_message: Optional[list] = None,
|
||||||
|
) -> Union[ChatCompletion, None]:
|
||||||
|
"""
|
||||||
|
处理流式请求
|
||||||
|
"""
|
||||||
|
response = await self.handle_single_chat(
|
||||||
|
user_message=user_message,
|
||||||
|
model_name=model_name,
|
||||||
|
tools_list=None, # TODO:让流式调用支持工具调用
|
||||||
|
tool_message=tools_message,
|
||||||
|
stream=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if isinstance(response, AsyncStream):
|
||||||
|
reasoning_contents = ""
|
||||||
|
answer_contents = ""
|
||||||
|
last_chunk = None
|
||||||
|
is_first_token_appeared = False
|
||||||
|
is_answering = False
|
||||||
|
async for chunk in response:
|
||||||
|
last_chunk = chunk
|
||||||
|
# print(chunk)
|
||||||
|
if not is_first_token_appeared:
|
||||||
|
logger.debug(f"{chunk.id}: 第一个 token 已出现")
|
||||||
|
is_first_token_appeared = True
|
||||||
|
if not chunk.choices:
|
||||||
|
logger.info("Usage:", chunk.usage)
|
||||||
|
else:
|
||||||
|
delta = chunk.choices[0].delta
|
||||||
|
if (
|
||||||
|
hasattr(delta, "reasoning_content")
|
||||||
|
and delta.reasoning_content is not None
|
||||||
|
):
|
||||||
|
reasoning_contents += delta.reasoning_content
|
||||||
|
else:
|
||||||
|
if not is_answering:
|
||||||
|
logger.debug(
|
||||||
|
f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
|
||||||
|
)
|
||||||
|
is_answering = True
|
||||||
|
if delta.content is not None:
|
||||||
|
answer_contents += delta.content
|
||||||
|
# print(last_chunk)
|
||||||
|
# 创建新的 ChatCompletion 对象
|
||||||
|
if last_chunk and last_chunk.choices:
|
||||||
|
message = ChatCompletionMessage(
|
||||||
|
content=answer_contents,
|
||||||
|
role="assistant",
|
||||||
|
tool_calls=last_chunk.choices[0].delta.tool_calls, # type: ignore
|
||||||
|
)
|
||||||
|
choice = Choice(
|
||||||
|
finish_reason=last_chunk.choices[0].finish_reason, # type: ignore
|
||||||
|
index=last_chunk.choices[0].index,
|
||||||
|
message=message,
|
||||||
|
)
|
||||||
|
return ChatCompletion(
|
||||||
|
id=last_chunk.id,
|
||||||
|
choices=[choice],
|
||||||
|
created=last_chunk.created,
|
||||||
|
model=last_chunk.model,
|
||||||
|
system_fingerprint=last_chunk.system_fingerprint,
|
||||||
|
object="chat.completion",
|
||||||
|
usage=last_chunk.usage,
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
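
The new `handle_stream_request` above consumes the streamed deltas and rebuilds a single `ChatCompletion` from the last chunk, so the downstream handler code stays unchanged. For reference, this is the underlying OpenAI SDK pattern it builds on — a minimal, self-contained sketch assuming a plain `AsyncOpenAI` client; the model name and prompt are placeholders, not values from this repository:

```python
import asyncio

from openai import AsyncOpenAI, AsyncStream
from openai.types.chat import ChatCompletionChunk


async def collect_stream_text(client: AsyncOpenAI) -> str:
    # With stream=True, awaiting the call yields an AsyncStream of chunks
    stream: AsyncStream[ChatCompletionChunk] = await client.chat.completions.create(
        model="gpt-4o-mini",  # placeholder model name
        messages=[{"role": "user", "content": "你好喵~"}],
        stream=True,
    )
    pieces: list[str] = []
    async for chunk in stream:
        # Some chunks carry no choices (the handler above logs usage from them)
        if chunk.choices and chunk.choices[0].delta.content is not None:
            pieces.append(chunk.choices[0].delta.content)
    return "".join(pieces)


if __name__ == "__main__":
    print(asyncio.run(collect_stream_text(AsyncOpenAI())))
```
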
@@ -257,7 +257,9 @@ async def marsho(
     )
     logger.info(f"正在获取回答,模型:{model_name}")
     # logger.info(f"上下文:{context_msg}")
-    response = await handler.handle_common_chat(usermsg, model_name, tools_lists)
+    response = await handler.handle_common_chat(
+        usermsg, model_name, tools_lists, config.marshoai_stream
+    )
     # await UniMessage(str(response)).send()
     if response is not None:
         context_user, context_assistant = response
@@ -293,7 +295,7 @@ with contextlib.suppress(ImportError): # 优化先不做()
                 ),
             ],
         )
-        choice = response.choices[0]
+        choice = response.choices[0]  # type: ignore
         if choice.finish_reason == CompletionsFinishReason.STOPPED:
             content = extract_content_and_think(choice.message)[0]
             await UniMessage(" " + str(content)).send(at_sender=True)
@@ -5,7 +5,7 @@ from .constants import USAGE

 metadata = PluginMetadata(
     name="Marsho AI 插件",
-    description="接入 Azure API 或其他 API 的 AI 聊天插件,支持图片处理,外部函数调用,兼容包括 DeepSeek-R1 在内的多个模型",
+    description="接入 Azure API 或其他 API 的 AI 聊天插件,支持图片处理,外部函数调用,兼容包括 DeepSeek-R1, QwQ-32B 在内的多个模型",
     usage=USAGE,
     type="application",
     config=ConfigModel,
@@ -3,7 +3,7 @@ import json
 import mimetypes
 import re
 import uuid
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union

 import aiofiles  # type: ignore
 import httpx
@@ -15,8 +15,8 @@ from nonebot.log import logger
 from nonebot_plugin_alconna import Image as ImageMsg
 from nonebot_plugin_alconna import Text as TextMsg
 from nonebot_plugin_alconna import UniMessage
-from openai import AsyncOpenAI, NotGiven
-from openai.types.chat import ChatCompletion, ChatCompletionMessage
+from openai import AsyncOpenAI, AsyncStream, NotGiven
+from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
 from zhDateTime import DateTime

 from ._types import DeveloperMessage
@@ -109,35 +109,13 @@ async def get_image_b64(url: str, timeout: int = 10) -> Optional[str]:
         return None


-async def make_chat(
-    client: ChatCompletionsClient,
-    msg: list,
-    model_name: str,
-    tools: Optional[list] = None,
-):
-    """
-    调用ai获取回复
-
-    参数:
-        client: 用于与AI模型进行通信
-        msg: 消息内容
-        model_name: 指定AI模型名
-        tools: 工具列表
-    """
-    return await client.complete(
-        messages=msg,
-        model=model_name,
-        tools=tools,
-        **config.marshoai_model_args,
-    )
-
-
 async def make_chat_openai(
     client: AsyncOpenAI,
     msg: list,
     model_name: str,
     tools: Optional[list] = None,
-) -> ChatCompletion:
+    stream: bool = False,
+) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
     """
     使用 Openai SDK 调用ai获取回复

@@ -152,6 +130,7 @@ async def make_chat_openai(
         model=model_name,
         tools=tools or NOT_GIVEN,
         timeout=config.marshoai_timeout,
+        stream=stream,
         **config.marshoai_model_args,
     )

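
Since `make_chat_openai` now returns `Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]` depending on `stream`, non-streaming call sites have to narrow the type; the diff above does this with `isinstance` checks or `# type: ignore`. A hedged alternative sketch — the helper name `_ensure_completion` is an illustration, not part of this commit:

```python
from typing import Union

from openai import AsyncStream
from openai.types.chat import ChatCompletion, ChatCompletionChunk


def _ensure_completion(
    response: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
) -> ChatCompletion:
    """Narrow the union for type checkers; non-streaming calls always hit the first branch."""
    if isinstance(response, ChatCompletion):
        return response
    raise TypeError("expected a ChatCompletion, got a streaming response")
```
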
@@ -34,7 +34,7 @@ dependencies = [
 license = { text = "MIT, Mulan PSL v2" }

 [project.urls]
-Homepage = "https://marsho.liteyuki.icu/"
+Homepage = "https://marshoai-docs.meli.liteyuki.icu/"


 [tool.nonebot]