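"""User-blacklist plugin.

Periodically pulls per-platform blacklist files from cdn.liteyuki.icu and, via an
event preprocessor, ignores incoming events whose sender id appears in any of them.
"""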
import datetime

import aiohttp
import nonebot
from nonebot import require
from nonebot.exception import IgnoredException
from nonebot.message import event_preprocessor
from nonebot_plugin_alconna.typings import Event

require("nonebot_plugin_apscheduler")

from nonebot_plugin_apscheduler import scheduler

# Raw blacklist entries per platform, keyed by platform name (e.g. "qq").
blacklist_data: dict[str, set[str]] = {}
# Union of every platform's entries; rebuilt after each refresh.
blacklist: set[str] = set()


@scheduler.scheduled_job("interval", minutes=10, next_run_time=datetime.datetime.now())
async def update_blacklist():
    """Scheduled entry point: refresh the remote blacklists every 10 minutes (and once at startup)."""
    await request_for_blacklist()


async def request_for_blacklist():
    """Fetch every platform's blacklist file and rebuild the merged set.

    Each source is expected to serve plain text at ``<base>/<platform>.txt``,
    one user id per line.
    """
    global blacklist
    urls = [
        "https://cdn.liteyuki.icu/static/ubl/"
    ]

    platforms = [
        "qq"
    ]

    for plat in platforms:
        for url in urls:
            # Append the platform file name, e.g. ".../ubl/qq.txt".
            url += f"{plat}.txt"
            async with aiohttp.ClientSession() as client:
                resp = await client.get(url)
                # One user id per line in the fetched file.
                blacklist_data[plat] = set((await resp.text()).splitlines())
    blacklist = get_uni_set()
    nonebot.logger.info("blacklists updated")


def get_uni_set() -> set:
    """Return the union of all per-platform blacklist sets."""
    s = set()
    for new_set in blacklist_data.values():
        s.update(new_set)
    return s


@event_preprocessor
async def pre_handle(event: Event):
    try:
        user_id = str(event.get_user_id())
    except Exception:
        # Events without a sender id (e.g. meta events) are let through.
        return

    if user_id in get_uni_set():
        raise IgnoredException("UserId in blacklist")
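# Rough manual-check sketch (assumption: run from an async debugging context where this
# module is importable; not part of the plugin's normal runtime path):
#
#     await request_for_blacklist()
#     print(len(blacklist), "ids currently blacklisted")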