pre implement cache

This commit is contained in:
Asankilp 2025-02-22 13:06:06 +08:00
parent 5eb3c66232
commit e1bc81c9e1
4 changed files with 37 additions and 1 deletions

View File

@@ -0,0 +1,15 @@
from functools import wraps

from .instances import cache


def from_cache(key):
    """Decorator factory that caches the wrapped function's result under *key*.

    On a hit (a non-None cached value) the cached result is returned without
    calling the function; on a miss the function runs and its result is stored
    via ``cache.set``.

    NOTE(review): all calls of a decorated function share the single *key*,
    so the arguments are ignored for lookup — confirm that is intended.

    Args:
        key: Hashable cache key the result is stored under.

    Returns:
        The actual decorator to apply to a function.
    """

    def decorator(func):
        @wraps(func)  # preserve the wrapped function's name/docstring
        def wrapper(*args, **kwargs):
            cached = cache.get(key)
            # Compare against None (not truthiness) so falsy results
            # such as 0 or "" are still served from the cache.
            if cached is not None:
                return cached
            result = func(*args, **kwargs)
            cache.set(key, result)
            return result

        return wrapper

    # BUG FIX: the original never returned `decorator`, so applying
    # @from_cache("k") replaced the decorated function with None.
    return decorator

View File

@@ -3,7 +3,7 @@ from nonebot import get_driver
from openai import AsyncOpenAI
from .config import config
from .models import MarshoContext, MarshoTools
from .models import Cache, MarshoContext, MarshoTools
driver = get_driver()
@@ -11,6 +11,7 @@ command_start = driver.config.command_start
model_name = config.marshoai_default_model
context = MarshoContext()
tools = MarshoTools()
cache = Cache()
token = config.marshoai_token
endpoint = config.marshoai_azure_endpoint
# client = ChatCompletionsClient(endpoint=endpoint, credential=AzureKeyCredential(token))

View File

@@ -11,6 +11,25 @@ from nonebot import logger
from .config import config
class Cache:
    """A minimal in-memory key/value cache backed by a plain dict."""

    def __init__(self):
        # Mapping of cache keys to stored values.
        self.cache: dict = {}

    def get(self, key):
        """Return the value stored under *key*, or None when absent.

        BUG FIX: the original inserted a ``None`` placeholder into the
        dict on every miss, so a read mutated the cache and made
        ``key in self.cache`` / iteration misleading.  A plain
        ``dict.get`` lookup returns the same value with no side effect.
        """
        return self.cache.get(key)

    def set(self, key, value):
        """Store *value* under *key*, overwriting any previous entry."""
        self.cache[key] = value
class MarshoContext:
"""
Marsho 的上下文类

View File

@@ -23,6 +23,7 @@ from ._types import DeveloperMessage
from .config import config
from .constants import CODE_BLOCK_PATTERN, IMG_LATEX_PATTERN, OPENAI_NEW_MODELS
from .deal_latex import ConvertLatex
from .instances import cache
nickname_json = None # 记录昵称
praises_json = None # 记录夸赞名单