From 8ec7b3617d78c6fd31f5d561769da66cbcc21f0b Mon Sep 17 00:00:00 2001
From: zhulinyv
Date: Wed, 10 Apr 2024 08:17:33 +0800
Subject: [PATCH] =?UTF-8?q?:construction:=20=E4=BF=AE=E6=94=B9=E6=8F=92?=
=?UTF-8?q?=E4=BB=B6=E5=8A=A0=E8=BD=BD=E6=96=B9=E5=BC=8F?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
pyproject.toml | 17 +-
src/plugins/nonebot_bison/__init__.py | 47 ++
.../nonebot_bison/admin_page/__init__.py | 94 +++
src/plugins/nonebot_bison/admin_page/api.py | 199 ++++++
.../nonebot_bison/admin_page/dist/.gitkeep | 0
src/plugins/nonebot_bison/admin_page/jwt.py | 22 +
.../nonebot_bison/admin_page/token_manager.py | 25 +
src/plugins/nonebot_bison/admin_page/types.py | 52 ++
src/plugins/nonebot_bison/apis.py | 12 +
src/plugins/nonebot_bison/bootstrap.py | 49 ++
src/plugins/nonebot_bison/compat.py | 28 +
src/plugins/nonebot_bison/config/__init__.py | 4 +
.../nonebot_bison/config/config_legacy.py | 252 ++++++++
src/plugins/nonebot_bison/config/db_config.py | 263 ++++++++
.../nonebot_bison/config/db_migration.py | 71 +++
src/plugins/nonebot_bison/config/db_model.py | 68 +++
.../config/migrations/0571870f5222_init_db.py | 61 ++
.../migrations/4a46ba54a3f3_alter_type.py | 56 ++
.../migrations/5da28f6facb3_rename_tables.py | 33 +
.../5f3370328e44_add_time_weight_table.py | 48 ++
.../632b8086bc2b_add_user_target.py | 41 ++
...38b3f39c2_make_user_target_not_nullable.py | 45 ++
.../8d3863e9d74b_remove_uid_and_type.py | 34 ++
.../a333d6224193_add_last_scheduled_time.py | 32 +
.../migrations/a5466912fad0_map_user.py | 52 ++
.../aceef470d69c_alter_fields_not_null.py | 52 ++
.../bd92923c218f_alter_json_not_null.py | 53 ++
.../migrations/c97c445e2bdb_add_constraint.py | 43 ++
.../f9baef347cc8_remove_old_target.py | 34 ++
.../nonebot_bison/config/subs_io/__init__.py | 3 +
.../config/subs_io/nbesf_model/__init__.py | 6 +
.../config/subs_io/nbesf_model/base.py | 35 ++
.../config/subs_io/nbesf_model/v1.py | 130 ++++
.../config/subs_io/nbesf_model/v2.py | 106 ++++
.../nonebot_bison/config/subs_io/subs_io.py | 77 +++
.../nonebot_bison/config/subs_io/utils.py | 4 +
src/plugins/nonebot_bison/config/utils.py | 10 +
.../nonebot_bison/platform/__init__.py | 24 +
.../nonebot_bison/platform/arknights.py | 251 ++++++++
.../nonebot_bison/platform/bilibili.py | 567 ++++++++++++++++++
src/plugins/nonebot_bison/platform/ff14.py | 46 ++
src/plugins/nonebot_bison/platform/ncm.py | 129 ++++
.../nonebot_bison/platform/platform.py | 501 ++++++++++++++++
src/plugins/nonebot_bison/platform/rss.py | 81 +++
src/plugins/nonebot_bison/platform/weibo.py | 191 ++++++
src/plugins/nonebot_bison/plugin_config.py | 43 ++
src/plugins/nonebot_bison/post/__init__.py | 1 +
.../nonebot_bison/post/abstract_post.py | 51 ++
src/plugins/nonebot_bison/post/post.py | 109 ++++
.../nonebot_bison/scheduler/__init__.py | 3 +
.../nonebot_bison/scheduler/manager.py | 50 ++
.../nonebot_bison/scheduler/scheduler.py | 153 +++++
src/plugins/nonebot_bison/script/__init__.py | 0
src/plugins/nonebot_bison/script/cli.py | 160 +++++
src/plugins/nonebot_bison/send.py | 80 +++
.../nonebot_bison/sub_manager/__init__.py | 120 ++++
.../nonebot_bison/sub_manager/add_sub.py | 156 +++++
.../nonebot_bison/sub_manager/del_sub.py | 52 ++
.../nonebot_bison/sub_manager/query_sub.py | 28 +
.../nonebot_bison/sub_manager/utils.py | 62 ++
src/plugins/nonebot_bison/theme/__init__.py | 22 +
src/plugins/nonebot_bison/theme/registry.py | 36 ++
.../theme/themes/arknights/__init__.py | 3 +
.../theme/themes/arknights/build.py | 69 +++
.../arknights/templates/announce.html.jinja | 31 +
.../themes/arknights/templates/style.css | 107 ++++
.../theme/themes/basic/__init__.py | 3 +
.../nonebot_bison/theme/themes/basic/build.py | 52 ++
.../theme/themes/brief/__init__.py | 3 +
.../nonebot_bison/theme/themes/brief/build.py | 39 ++
.../theme/themes/ceobe_canteen/README.md | 10 +
.../theme/themes/ceobe_canteen/__init__.py | 3 +
.../theme/themes/ceobe_canteen/build.py | 114 ++++
.../ceobe_canteen/templates/bison_logo.jpg | Bin 0 -> 108454 bytes
.../templates/ceobe_canteen.html.jinja | 98 +++
.../templates/ceobecanteen_logo.png | Bin 0 -> 723399 bytes
.../theme/themes/ht2i/__init__.py | 3 +
.../nonebot_bison/theme/themes/ht2i/build.py | 62 ++
src/plugins/nonebot_bison/theme/types.py | 78 +++
src/plugins/nonebot_bison/theme/utils.py | 22 +
src/plugins/nonebot_bison/types.py | 60 ++
src/plugins/nonebot_bison/utils/__init__.py | 108 ++++
src/plugins/nonebot_bison/utils/context.py | 43 ++
src/plugins/nonebot_bison/utils/get_bot.py | 34 ++
src/plugins/nonebot_bison/utils/http.py | 18 +
src/plugins/nonebot_bison/utils/image.py | 110 ++++
.../nonebot_bison/utils/scheduler_config.py | 35 ++
src/plugins/smart_reply/utils.py | 58 +-
88 files changed, 6188 insertions(+), 49 deletions(-)
create mode 100644 src/plugins/nonebot_bison/__init__.py
create mode 100644 src/plugins/nonebot_bison/admin_page/__init__.py
create mode 100644 src/plugins/nonebot_bison/admin_page/api.py
create mode 100644 src/plugins/nonebot_bison/admin_page/dist/.gitkeep
create mode 100644 src/plugins/nonebot_bison/admin_page/jwt.py
create mode 100644 src/plugins/nonebot_bison/admin_page/token_manager.py
create mode 100644 src/plugins/nonebot_bison/admin_page/types.py
create mode 100644 src/plugins/nonebot_bison/apis.py
create mode 100644 src/plugins/nonebot_bison/bootstrap.py
create mode 100644 src/plugins/nonebot_bison/compat.py
create mode 100644 src/plugins/nonebot_bison/config/__init__.py
create mode 100644 src/plugins/nonebot_bison/config/config_legacy.py
create mode 100644 src/plugins/nonebot_bison/config/db_config.py
create mode 100644 src/plugins/nonebot_bison/config/db_migration.py
create mode 100644 src/plugins/nonebot_bison/config/db_model.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/0571870f5222_init_db.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/4a46ba54a3f3_alter_type.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/5da28f6facb3_rename_tables.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/5f3370328e44_add_time_weight_table.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/632b8086bc2b_add_user_target.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/67c38b3f39c2_make_user_target_not_nullable.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/8d3863e9d74b_remove_uid_and_type.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/a333d6224193_add_last_scheduled_time.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/a5466912fad0_map_user.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/aceef470d69c_alter_fields_not_null.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/bd92923c218f_alter_json_not_null.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/c97c445e2bdb_add_constraint.py
create mode 100644 src/plugins/nonebot_bison/config/migrations/f9baef347cc8_remove_old_target.py
create mode 100644 src/plugins/nonebot_bison/config/subs_io/__init__.py
create mode 100644 src/plugins/nonebot_bison/config/subs_io/nbesf_model/__init__.py
create mode 100644 src/plugins/nonebot_bison/config/subs_io/nbesf_model/base.py
create mode 100644 src/plugins/nonebot_bison/config/subs_io/nbesf_model/v1.py
create mode 100644 src/plugins/nonebot_bison/config/subs_io/nbesf_model/v2.py
create mode 100644 src/plugins/nonebot_bison/config/subs_io/subs_io.py
create mode 100644 src/plugins/nonebot_bison/config/subs_io/utils.py
create mode 100644 src/plugins/nonebot_bison/config/utils.py
create mode 100644 src/plugins/nonebot_bison/platform/__init__.py
create mode 100644 src/plugins/nonebot_bison/platform/arknights.py
create mode 100644 src/plugins/nonebot_bison/platform/bilibili.py
create mode 100644 src/plugins/nonebot_bison/platform/ff14.py
create mode 100644 src/plugins/nonebot_bison/platform/ncm.py
create mode 100644 src/plugins/nonebot_bison/platform/platform.py
create mode 100644 src/plugins/nonebot_bison/platform/rss.py
create mode 100644 src/plugins/nonebot_bison/platform/weibo.py
create mode 100644 src/plugins/nonebot_bison/plugin_config.py
create mode 100644 src/plugins/nonebot_bison/post/__init__.py
create mode 100644 src/plugins/nonebot_bison/post/abstract_post.py
create mode 100644 src/plugins/nonebot_bison/post/post.py
create mode 100644 src/plugins/nonebot_bison/scheduler/__init__.py
create mode 100644 src/plugins/nonebot_bison/scheduler/manager.py
create mode 100644 src/plugins/nonebot_bison/scheduler/scheduler.py
create mode 100644 src/plugins/nonebot_bison/script/__init__.py
create mode 100644 src/plugins/nonebot_bison/script/cli.py
create mode 100644 src/plugins/nonebot_bison/send.py
create mode 100644 src/plugins/nonebot_bison/sub_manager/__init__.py
create mode 100644 src/plugins/nonebot_bison/sub_manager/add_sub.py
create mode 100644 src/plugins/nonebot_bison/sub_manager/del_sub.py
create mode 100644 src/plugins/nonebot_bison/sub_manager/query_sub.py
create mode 100644 src/plugins/nonebot_bison/sub_manager/utils.py
create mode 100644 src/plugins/nonebot_bison/theme/__init__.py
create mode 100644 src/plugins/nonebot_bison/theme/registry.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/arknights/__init__.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/arknights/build.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/arknights/templates/announce.html.jinja
create mode 100644 src/plugins/nonebot_bison/theme/themes/arknights/templates/style.css
create mode 100644 src/plugins/nonebot_bison/theme/themes/basic/__init__.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/basic/build.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/brief/__init__.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/brief/build.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/ceobe_canteen/README.md
create mode 100644 src/plugins/nonebot_bison/theme/themes/ceobe_canteen/__init__.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/ceobe_canteen/build.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/ceobe_canteen/templates/bison_logo.jpg
create mode 100644 src/plugins/nonebot_bison/theme/themes/ceobe_canteen/templates/ceobe_canteen.html.jinja
create mode 100644 src/plugins/nonebot_bison/theme/themes/ceobe_canteen/templates/ceobecanteen_logo.png
create mode 100644 src/plugins/nonebot_bison/theme/themes/ht2i/__init__.py
create mode 100644 src/plugins/nonebot_bison/theme/themes/ht2i/build.py
create mode 100644 src/plugins/nonebot_bison/theme/types.py
create mode 100644 src/plugins/nonebot_bison/theme/utils.py
create mode 100644 src/plugins/nonebot_bison/types.py
create mode 100644 src/plugins/nonebot_bison/utils/__init__.py
create mode 100644 src/plugins/nonebot_bison/utils/context.py
create mode 100644 src/plugins/nonebot_bison/utils/get_bot.py
create mode 100644 src/plugins/nonebot_bison/utils/http.py
create mode 100644 src/plugins/nonebot_bison/utils/image.py
create mode 100644 src/plugins/nonebot_bison/utils/scheduler_config.py
diff --git a/pyproject.toml b/pyproject.toml
index c1391eb6..ceb15576 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -131,10 +131,25 @@ win32-setctime = "1.1.0"
wordcloud = "1.8.2.2"
[tool.nonebot]
-plugins = ["nonebot_plugin_guild_patch", "nonebot_bison"]
+plugins = ["nonebot_plugin_guild_patch"]
plugin_dirs = ["src/plugins"]
adapters = [{name = "OneBot V11", module_name = "nonebot.adapters.onebot.v11", project_link = "nonebot-adapter-onebot", desc = "OneBot V11 协议"}]
+# https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html
+[tool.black]
+line-length = 500
+
+# https://beta.ruff.rs/docs/settings/
+[tool.ruff]
+line-length = 500
+# https://beta.ruff.rs/docs/rules/
+select = ["E", "W", "F"]
+ignore = ["F401"]
+# Exclude a variety of commonly ignored directories.
+respect-gitignore = true
+ignore-init-module-imports = true
+
+
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
diff --git a/src/plugins/nonebot_bison/__init__.py b/src/plugins/nonebot_bison/__init__.py
new file mode 100644
index 00000000..90da7a1a
--- /dev/null
+++ b/src/plugins/nonebot_bison/__init__.py
@@ -0,0 +1,47 @@
+from nonebot.plugin import PluginMetadata, require
+
+require("nonebot_plugin_apscheduler")
+require("nonebot_plugin_datastore")
+require("nonebot_plugin_saa")
+
+import nonebot_plugin_saa
+
+from .plugin_config import PlugConfig, plugin_config
+from . import post, send, theme, types, utils, config, platform, bootstrap, scheduler, admin_page, sub_manager
+
+__help__version__ = "0.8.2"
+nonebot_plugin_saa.enable_auto_select_bot()
+
+__help__plugin__name__ = "nonebot_bison"
+__usage__ = (
+ "本bot可以提供b站、微博等社交媒体的消息订阅,详情请查看本bot文档,"
+ f"或者{'at本bot' if plugin_config.bison_to_me else '' }发送“添加订阅”订阅第一个帐号,"
+ "发送“查询订阅”或“删除订阅”管理订阅"
+)
+
+__supported_adapters__ = nonebot_plugin_saa.__plugin_meta__.supported_adapters
+
+__plugin_meta__ = PluginMetadata(
+ name="Bison",
+ description="通用订阅推送插件",
+ usage=__usage__,
+ type="application",
+ homepage="https://github.com/felinae98/nonebot-bison",
+ config=PlugConfig,
+ supported_adapters=__supported_adapters__,
+ extra={"version": __help__version__, "docs": "https://nonebot-bison.netlify.app/"},
+)
+
+__all__ = [
+ "admin_page",
+ "bootstrap",
+ "config",
+ "sub_manager",
+ "post",
+ "scheduler",
+ "send",
+ "platform",
+ "types",
+ "utils",
+ "theme",
+]
diff --git a/src/plugins/nonebot_bison/admin_page/__init__.py b/src/plugins/nonebot_bison/admin_page/__init__.py
new file mode 100644
index 00000000..4c2d4e68
--- /dev/null
+++ b/src/plugins/nonebot_bison/admin_page/__init__.py
@@ -0,0 +1,94 @@
+import os
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from nonebot.log import logger
+from nonebot.rule import to_me
+from nonebot.typing import T_State
+from nonebot import get_driver, on_command
+from nonebot.adapters.onebot.v11 import Bot
+from nonebot.adapters.onebot.v11.event import PrivateMessageEvent
+
+from .api import router as api_router
+from ..plugin_config import plugin_config
+from .token_manager import token_manager as tm
+
+if TYPE_CHECKING:
+ from nonebot.drivers.fastapi import Driver
+
+
+STATIC_PATH = (Path(__file__).parent / "dist").resolve()
+
+
+def init_fastapi(driver: "Driver"):
+ import socketio
+ from fastapi.applications import FastAPI
+ from fastapi.staticfiles import StaticFiles
+
+ sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*")
+ socket_app = socketio.ASGIApp(sio, socketio_path="socket")
+
+ class SinglePageApplication(StaticFiles):
+ def __init__(self, directory: os.PathLike, index="index.html"):
+ self.index = index
+ super().__init__(directory=directory, packages=None, html=True, check_dir=True)
+
+ def lookup_path(self, path: str) -> tuple[str, os.stat_result | None]:
+ full_path, stat_res = super().lookup_path(path)
+ if stat_res is None:
+ return super().lookup_path(self.index)
+ return (full_path, stat_res)
+
+ def register_router_fastapi(driver: "Driver", socketio):
+ static_path = STATIC_PATH
+ nonebot_app = FastAPI(
+ title="nonebot-bison",
+ description="nonebot-bison webui and api",
+ )
+ nonebot_app.include_router(api_router)
+ nonebot_app.mount("/", SinglePageApplication(directory=static_path), name="bison-frontend")
+
+ app = driver.server_app
+ app.mount("/bison", nonebot_app, "nonebot-bison")
+
+ register_router_fastapi(driver, socket_app)
+ host = str(driver.config.host)
+ port = driver.config.port
+ if host in ["0.0.0.0", "127.0.0.1"]:
+ host = "localhost"
+ logger.opt(colors=True).info(f"Nonebot Bison frontend will be running at: http://{host}:{port}/bison")
+ logger.opt(colors=True).info("该页面不能被直接访问,请私聊bot 后台管理 以获取可访问地址")
+
+
+def register_get_token_handler():
+ get_token = on_command("后台管理", rule=to_me(), priority=5, aliases={"管理后台"})
+
+ @get_token.handle()
+ async def send_token(bot: "Bot", event: PrivateMessageEvent, state: T_State):
+ token = tm.get_user_token((event.get_user_id(), event.sender.nickname))
+ await get_token.finish(f"请访问: {plugin_config.outer_url / 'auth' / token}")
+
+ get_token.__help__name__ = "获取后台管理地址" # type: ignore
+ get_token.__help__info__ = "获取管理bot后台的地址,该地址会在一段时间过后过期,请不要泄漏该地址" # type: ignore
+
+
+def get_fastapi_driver() -> "Driver | None":
+ try:
+ from nonebot.drivers.fastapi import Driver
+
+ if (driver := get_driver()) and isinstance(driver, Driver):
+ return driver
+ return None
+
+ except ImportError:
+ return None
+
+
+if (STATIC_PATH / "index.html").exists():
+ if driver := get_fastapi_driver():
+ init_fastapi(driver)
+ register_get_token_handler()
+ else:
+ logger.warning("your driver is not fastapi, webui feature will be disabled")
+else:
+ logger.warning("Frontend file not found, please compile it or use docker or pypi version")
diff --git a/src/plugins/nonebot_bison/admin_page/api.py b/src/plugins/nonebot_bison/admin_page/api.py
new file mode 100644
index 00000000..afe834e0
--- /dev/null
+++ b/src/plugins/nonebot_bison/admin_page/api.py
@@ -0,0 +1,199 @@
+import nonebot
+from fastapi import status
+from fastapi.routing import APIRouter
+from fastapi.param_functions import Depends
+from fastapi.exceptions import HTTPException
+from nonebot_plugin_saa import TargetQQGroup
+from nonebot_plugin_saa.auto_select_bot import get_bot
+from fastapi.security.oauth2 import OAuth2PasswordBearer
+
+from ..types import WeightConfig
+from ..apis import check_sub_target
+from .jwt import load_jwt, pack_jwt
+from ..types import Target as T_Target
+from ..utils.get_bot import get_groups
+from ..platform import platform_manager
+from .token_manager import token_manager
+from ..config.db_config import SubscribeDupException
+from ..config import NoSuchUserException, NoSuchTargetException, NoSuchSubscribeException, config
+from .types import (
+ TokenResp,
+ GlobalConf,
+ StatusResp,
+ SubscribeResp,
+ PlatformConfig,
+ AddSubscribeReq,
+ SubscribeConfig,
+ SubscribeGroupDetail,
+)
+
+router = APIRouter(prefix="/api", tags=["api"])
+
+oauth_scheme = OAuth2PasswordBearer(tokenUrl="token")
+
+
+async def get_jwt_obj(token: str = Depends(oauth_scheme)):
+    obj = load_jwt(token)
+    if not obj:
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
+    return obj
+
+
+async def check_group_permission(groupNumber: int, token_obj: dict = Depends(get_jwt_obj)):
+ groups = token_obj["groups"]
+ for group in groups:
+ if int(groupNumber) == group["id"]:
+ return
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
+
+
+async def check_is_superuser(token_obj: dict = Depends(get_jwt_obj)):
+ if token_obj.get("type") != "admin":
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN)
+
+
+@router.get("/global_conf")
+async def get_global_conf() -> GlobalConf:
+ res = {}
+ for platform_name, platform in platform_manager.items():
+ res[platform_name] = PlatformConfig(
+ platformName=platform_name,
+ categories=platform.categories,
+ enabledTag=platform.enable_tag,
+ name=platform.name,
+ hasTarget=getattr(platform, "has_target"),
+ )
+ return GlobalConf(platformConf=res)
+
+
+async def get_admin_groups(qq: int):
+ res = []
+ for group in await get_groups():
+ group_id = group["group_id"]
+ bot = get_bot(TargetQQGroup(group_id=group_id))
+ if not bot:
+ continue
+ users = await bot.get_group_member_list(group_id=group_id)
+ for user in users:
+ if user["user_id"] == qq and user["role"] in ("owner", "admin"):
+ res.append({"id": group_id, "name": group["group_name"]})
+ return res
+
+
+@router.get("/auth")
+async def auth(token: str) -> TokenResp:
+ if qq_tuple := token_manager.get_user(token):
+ qq, nickname = qq_tuple
+ if str(qq) in nonebot.get_driver().config.superusers:
+ jwt_obj = {
+ "id": qq,
+ "type": "admin",
+ "groups": [
+ {
+ "id": info["group_id"],
+ "name": info["group_name"],
+ }
+ for info in await get_groups()
+ ],
+ }
+ ret_obj = TokenResp(
+ type="admin",
+ name=nickname,
+ id=qq,
+ token=pack_jwt(jwt_obj),
+ )
+ return ret_obj
+ if admin_groups := await get_admin_groups(int(qq)):
+ jwt_obj = {"id": str(qq), "type": "user", "groups": admin_groups}
+ ret_obj = TokenResp(
+ type="user",
+ name=nickname,
+ id=qq,
+ token=pack_jwt(jwt_obj),
+ )
+ return ret_obj
+ else:
+ raise HTTPException(400, "permission denied")
+ else:
+ raise HTTPException(400, "code error")
+
+
+@router.get("/subs")
+async def get_subs_info(jwt_obj: dict = Depends(get_jwt_obj)) -> SubscribeResp:
+ groups = jwt_obj["groups"]
+ res: SubscribeResp = {}
+ for group in groups:
+ group_id = group["id"]
+ raw_subs = await config.list_subscribe(TargetQQGroup(group_id=group_id))
+ subs = [
+ SubscribeConfig(
+ platformName=sub.target.platform_name,
+ targetName=sub.target.target_name,
+ cats=sub.categories,
+ tags=sub.tags,
+ target=sub.target.target,
+ )
+ for sub in raw_subs
+ ]
+ res[group_id] = SubscribeGroupDetail(name=group["name"], subscribes=subs)
+ return res
+
+
+@router.get("/target_name", dependencies=[Depends(get_jwt_obj)])
+async def get_target_name(platformName: str, target: str):
+ return {"targetName": await check_sub_target(platformName, T_Target(target))}
+
+
+@router.post("/subs", dependencies=[Depends(check_group_permission)])
+async def add_group_sub(groupNumber: int, req: AddSubscribeReq) -> StatusResp:
+ try:
+ await config.add_subscribe(
+ TargetQQGroup(group_id=groupNumber),
+ T_Target(req.target),
+ req.targetName,
+ req.platformName,
+ req.cats,
+ req.tags,
+ )
+ return StatusResp(ok=True, msg="")
+ except SubscribeDupException:
+ raise HTTPException(status.HTTP_400_BAD_REQUEST, "subscribe duplicated")
+
+
+@router.delete("/subs", dependencies=[Depends(check_group_permission)])
+async def del_group_sub(groupNumber: int, platformName: str, target: str):
+ try:
+ await config.del_subscribe(TargetQQGroup(group_id=groupNumber), target, platformName)
+ except (NoSuchUserException, NoSuchSubscribeException):
+ raise HTTPException(status.HTTP_400_BAD_REQUEST, "no such user or subscribe")
+ return StatusResp(ok=True, msg="")
+
+
+@router.patch("/subs", dependencies=[Depends(check_group_permission)])
+async def update_group_sub(groupNumber: int, req: AddSubscribeReq):
+ try:
+ await config.update_subscribe(
+ TargetQQGroup(group_id=groupNumber),
+ req.target,
+ req.targetName,
+ req.platformName,
+ req.cats,
+ req.tags,
+ )
+ except (NoSuchUserException, NoSuchSubscribeException):
+ raise HTTPException(status.HTTP_400_BAD_REQUEST, "no such user or subscribe")
+ return StatusResp(ok=True, msg="")
+
+
+@router.get("/weight", dependencies=[Depends(check_is_superuser)])
+async def get_weight_config():
+ return await config.get_all_weight_config()
+
+
+@router.put("/weight", dependencies=[Depends(check_is_superuser)])
+async def update_weight_config(platformName: str, target: str, weight_config: WeightConfig):
+ try:
+ await config.update_time_weight_config(T_Target(target), platformName, weight_config)
+ except NoSuchTargetException:
+ raise HTTPException(status.HTTP_400_BAD_REQUEST, "no such subscribe")
+ return StatusResp(ok=True, msg="")
diff --git a/src/plugins/nonebot_bison/admin_page/dist/.gitkeep b/src/plugins/nonebot_bison/admin_page/dist/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/src/plugins/nonebot_bison/admin_page/jwt.py b/src/plugins/nonebot_bison/admin_page/jwt.py
new file mode 100644
index 00000000..866c184d
--- /dev/null
+++ b/src/plugins/nonebot_bison/admin_page/jwt.py
@@ -0,0 +1,22 @@
+import random
+import string
+import datetime
+
+import jwt
+
+_key = "".join(random.SystemRandom().choice(string.ascii_letters) for _ in range(16))
+
+
+def pack_jwt(obj: dict) -> str:
+ return jwt.encode(
+ {"exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1), **obj},
+ _key,
+ algorithm="HS256",
+ )
+
+
+def load_jwt(token: str) -> dict | None:
+ try:
+ return jwt.decode(token, _key, algorithms=["HS256"])
+ except Exception:
+ return None
diff --git a/src/plugins/nonebot_bison/admin_page/token_manager.py b/src/plugins/nonebot_bison/admin_page/token_manager.py
new file mode 100644
index 00000000..bb62d0ad
--- /dev/null
+++ b/src/plugins/nonebot_bison/admin_page/token_manager.py
@@ -0,0 +1,25 @@
+import random
+import string
+
+from expiringdict import ExpiringDict
+
+
+class TokenManager:
+ def __init__(self):
+ self.token_manager = ExpiringDict(max_len=100, max_age_seconds=60 * 10)
+
+ def get_user(self, token: str) -> tuple | None:
+ res = self.token_manager.get(token)
+ assert res is None or isinstance(res, tuple)
+ return res
+
+ def save_user(self, token: str, qq: tuple) -> None:
+ self.token_manager[token] = qq
+
+ def get_user_token(self, qq: tuple) -> str:
+ token = "".join(random.choices(string.ascii_letters + string.digits, k=16))
+ self.save_user(token, qq)
+ return token
+
+
+token_manager = TokenManager()
diff --git a/src/plugins/nonebot_bison/admin_page/types.py b/src/plugins/nonebot_bison/admin_page/types.py
new file mode 100644
index 00000000..7a18b67f
--- /dev/null
+++ b/src/plugins/nonebot_bison/admin_page/types.py
@@ -0,0 +1,52 @@
+from pydantic import BaseModel
+
+
+class PlatformConfig(BaseModel):
+ name: str
+ categories: dict[int, str]
+ enabledTag: bool
+ platformName: str
+ hasTarget: bool
+
+
+AllPlatformConf = dict[str, PlatformConfig]
+
+
+class GlobalConf(BaseModel):
+ platformConf: AllPlatformConf
+
+
+class TokenResp(BaseModel):
+ token: str
+ type: str
+ id: int
+ name: str
+
+
+class SubscribeConfig(BaseModel):
+ platformName: str
+ target: str
+ targetName: str
+ cats: list[int]
+ tags: list[str]
+
+
+class SubscribeGroupDetail(BaseModel):
+ name: str
+ subscribes: list[SubscribeConfig]
+
+
+SubscribeResp = dict[int, SubscribeGroupDetail]
+
+
+class AddSubscribeReq(BaseModel):
+ platformName: str
+ target: str
+ targetName: str
+ cats: list[int]
+ tags: list[str]
+
+
+class StatusResp(BaseModel):
+ ok: bool
+ msg: str
diff --git a/src/plugins/nonebot_bison/apis.py b/src/plugins/nonebot_bison/apis.py
new file mode 100644
index 00000000..6d5130ea
--- /dev/null
+++ b/src/plugins/nonebot_bison/apis.py
@@ -0,0 +1,12 @@
+from .types import Target
+from .scheduler import scheduler_dict
+from .platform import platform_manager
+
+
+async def check_sub_target(platform_name: str, target: Target):
+ platform = platform_manager[platform_name]
+ scheduler_conf_class = platform.scheduler
+ scheduler = scheduler_dict[scheduler_conf_class]
+ client = await scheduler.scheduler_config_obj.get_query_name_client()
+
+ return await platform_manager[platform_name].get_target_name(client, target)
diff --git a/src/plugins/nonebot_bison/bootstrap.py b/src/plugins/nonebot_bison/bootstrap.py
new file mode 100644
index 00000000..92d06a84
--- /dev/null
+++ b/src/plugins/nonebot_bison/bootstrap.py
@@ -0,0 +1,49 @@
+from nonebot.log import logger
+from sqlalchemy import text, inspect
+from nonebot_plugin_datastore.db import get_engine, pre_db_init, post_db_init
+
+from .config.db_migration import data_migrate
+from .scheduler.manager import init_scheduler
+from .config.config_legacy import start_up as legacy_db_startup
+
+
+@pre_db_init
+async def pre():
+ def _has_table(conn, table_name):
+ insp = inspect(conn)
+ return insp.has_table(table_name)
+
+ async with get_engine().begin() as conn:
+ if not await conn.run_sync(_has_table, "alembic_version"):
+ logger.debug("未发现默认版本数据库,开始初始化")
+ return
+
+ logger.debug("发现默认版本数据库,开始检查版本")
+ t = await conn.scalar(text("select version_num from alembic_version"))
+ if t not in [
+ "4a46ba54a3f3", # alter_type
+ "5f3370328e44", # add_time_weight_table
+ "0571870f5222", # init_db
+ "a333d6224193", # add_last_scheduled_time
+ "c97c445e2bdb", # add_constraint
+ ]:
+ logger.warning(f"当前数据库版本:{t},不是插件的版本,已跳过。")
+ return
+
+ logger.debug(f"当前数据库版本:{t},是插件的版本,开始迁移。")
+ # 删除可能存在的版本数据库
+ if await conn.run_sync(_has_table, "nonebot_bison_alembic_version"):
+ await conn.execute(text("drop table nonebot_bison_alembic_version"))
+
+ await conn.execute(text("alter table alembic_version rename to nonebot_bison_alembic_version"))
+
+
+@post_db_init
+async def post():
+ # legacy db
+ legacy_db_startup()
+ # migrate data
+ await data_migrate()
+ # init scheduler
+ await init_scheduler()
+ logger.info("nonebot-bison bootstrap done")
diff --git a/src/plugins/nonebot_bison/compat.py b/src/plugins/nonebot_bison/compat.py
new file mode 100644
index 00000000..d4a65a5b
--- /dev/null
+++ b/src/plugins/nonebot_bison/compat.py
@@ -0,0 +1,28 @@
+from typing import Literal, overload
+
+from pydantic import BaseModel
+from nonebot.compat import PYDANTIC_V2
+
+__all__ = ("model_validator", "model_rebuild")
+
+
+if PYDANTIC_V2:
+ from pydantic import model_validator as model_validator
+
+ def model_rebuild(model: type[BaseModel]):
+ return model.model_rebuild()
+
+else:
+ from pydantic import root_validator
+
+ @overload
+ def model_validator(*, mode: Literal["before"]): ...
+
+ @overload
+ def model_validator(*, mode: Literal["after"]): ...
+
+ def model_validator(*, mode: Literal["before", "after"]):
+ return root_validator(pre=mode == "before", allow_reuse=True)
+
+ def model_rebuild(model: type[BaseModel]):
+ return model.update_forward_refs()
diff --git a/src/plugins/nonebot_bison/config/__init__.py b/src/plugins/nonebot_bison/config/__init__.py
new file mode 100644
index 00000000..a04d41f0
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/__init__.py
@@ -0,0 +1,4 @@
+from .db_config import config as config
+from .utils import NoSuchUserException as NoSuchUserException
+from .utils import NoSuchTargetException as NoSuchTargetException
+from .utils import NoSuchSubscribeException as NoSuchSubscribeException
diff --git a/src/plugins/nonebot_bison/config/config_legacy.py b/src/plugins/nonebot_bison/config/config_legacy.py
new file mode 100644
index 00000000..24e7e4dd
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/config_legacy.py
@@ -0,0 +1,252 @@
+import os
+import json
+from os import path
+from pathlib import Path
+from datetime import datetime
+from collections import defaultdict
+from typing import Literal, TypedDict
+
+from nonebot.log import logger
+from tinydb import Query, TinyDB
+
+from ..utils import Singleton
+from ..types import User, Target
+from ..platform import platform_manager
+from ..plugin_config import plugin_config
+from .utils import NoSuchUserException, NoSuchSubscribeException
+
+# Platform identifiers known to the platform manager; legacy subscriptions
+# whose target_type is not in this set are purged in Config.update_send_cache().
+supported_target_type = platform_manager.keys()
+
+
+def get_config_path() -> tuple[str, str]:
+ if plugin_config.bison_config_path:
+ data_dir = plugin_config.bison_config_path
+ else:
+ working_dir = os.getcwd()
+ data_dir = path.join(working_dir, "data")
+ old_path = path.join(data_dir, "hk_reporter.json")
+ new_path = path.join(data_dir, "bison.json")
+ deprecated_maker_path = path.join(data_dir, "bison.json.deprecated")
+ if os.path.exists(old_path) and not os.path.exists(new_path):
+ os.rename(old_path, new_path)
+ return new_path, deprecated_maker_path
+
+
+def drop():
+    """Disable the legacy TinyDB store after a successful migration.
+
+    Closes the DB, marks the singleton unavailable and writes a
+    ``bison.json.deprecated`` marker file recording the migration time.
+    Returns True if the legacy file existed and was deprecated, else False.
+    """
+    config = Config()
+    if plugin_config.bison_config_path:
+        data_dir = plugin_config.bison_config_path
+    else:
+        working_dir = os.getcwd()
+        data_dir = path.join(working_dir, "data")
+    old_path = path.join(data_dir, "bison.json")
+    deprecated_marker_path = path.join(data_dir, "bison.json.deprecated")
+    if os.path.exists(old_path):
+        config.db.close()
+        config.available = False
+        # The marker's presence makes Config._do_init() refuse to reopen.
+        with open(deprecated_marker_path, "w") as file:
+            content = {
+                "migration_time": datetime.now().isoformat(),
+            }
+            file.write(json.dumps(content))
+        return True
+    return False
+
+
+class SubscribeContent(TypedDict):
+    """Shape of one subscription entry in the legacy JSON store."""
+
+    target: str
+    target_type: str
+    target_name: str
+    cats: list[int]
+    tags: list[str]
+
+
+class ConfigContent(TypedDict):
+    """Shape of one per-user record in the legacy JSON store."""
+
+    user: int
+    user_type: Literal["group", "private"]
+    subs: list[SubscribeContent]
+
+
+class Config(metaclass=Singleton):
+    """Legacy TinyDB-backed subscription store.
+
+    Deprecated: kept only so old data can be migrated to the SQL database
+    (see ``db_migration.data_migrate``); afterwards ``drop()`` disables it.
+    """
+
+    # Bump when the on-disk JSON layout changes; start_up() migrates old data.
+    migrate_version = 2
+
+    def __init__(self):
+        self._do_init()
+
+    def _do_init(self):
+        """Open the TinyDB file if present; a deprecation marker disables the store."""
+        path, deprecated_marker_path = get_config_path()
+        if Path(deprecated_marker_path).exists():
+            self.available = False
+        elif Path(path).exists():
+            self.available = True
+            self.db = TinyDB(path, encoding="utf-8")
+            self.kv_config = self.db.table("kv")
+            self.user_target = self.db.table("user_target")
+            # Send caches, rebuilt from scratch by update_send_cache().
+            self.target_user_cache: dict[str, defaultdict[Target, list[User]]] = {}
+            self.target_user_cat_cache = {}
+            self.target_user_tag_cache = {}
+            self.target_list = {}
+            # Per-platform round-robin cursor used by get_next_target().
+            self.next_index: defaultdict[str, int] = defaultdict(lambda: 0)
+        else:
+            self.available = False
+
+    def add_subscribe(self, user, user_type, target, target_name, target_type, cats, tags):
+        """Append a subscription to the user's record, creating the record if absent."""
+        user_query = Query()
+        query = (user_query.user == user) & (user_query.user_type == user_type)
+        if user_data := self.user_target.get(query):
+            # update
+            assert not isinstance(user_data, list)
+            subs: list = user_data.get("subs", [])
+            subs.append(
+                {
+                    "target": target,
+                    "target_type": target_type,
+                    "target_name": target_name,
+                    "cats": cats,
+                    "tags": tags,
+                }
+            )
+            self.user_target.update({"subs": subs}, query)
+        else:
+            # insert
+            self.user_target.insert(
+                {
+                    "user": user,
+                    "user_type": user_type,
+                    "subs": [
+                        {
+                            "target": target,
+                            "target_type": target_type,
+                            "target_name": target_name,
+                            "cats": cats,
+                            "tags": tags,
+                        }
+                    ],
+                }
+            )
+        self.update_send_cache()
+
+    def list_subscribe(self, user, user_type) -> list[SubscribeContent]:
+        """Return the user's subscriptions, or [] if the user is unknown."""
+        query = Query()
+        if user_sub := self.user_target.get((query.user == user) & (query.user_type == user_type)):
+            assert not isinstance(user_sub, list)
+            return user_sub["subs"]
+        return []
+
+    def get_all_subscribe(self):
+        """Return the raw TinyDB table holding all user records."""
+        return self.user_target
+
+    def del_subscribe(self, user, user_type, target, target_type):
+        """Remove one subscription; raises if the user or subscription is missing."""
+        user_query = Query()
+        query = (user_query.user == user) & (user_query.user_type == user_type)
+        if not (query_res := self.user_target.get(query)):
+            raise NoSuchUserException()
+        assert not isinstance(query_res, list)
+        subs = query_res.get("subs", [])
+        for idx, sub in enumerate(subs):
+            if sub.get("target") == target and sub.get("target_type") == target_type:
+                # Safe despite mutating during iteration: we return immediately.
+                subs.pop(idx)
+                self.user_target.update({"subs": subs}, query)
+                self.update_send_cache()
+                return
+        raise NoSuchSubscribeException()
+
+    def update_subscribe(self, user, user_type, target, target_name, target_type, cats, tags):
+        """Overwrite name/cats/tags of an existing subscription in place."""
+        user_query = Query()
+        query = (user_query.user == user) & (user_query.user_type == user_type)
+        if user_data := self.user_target.get(query):
+            # update
+            assert not isinstance(user_data, list)
+            subs: list = user_data.get("subs", [])
+            find_flag = False
+            for item in subs:
+                if item["target"] == target and item["target_type"] == target_type:
+                    item["target_name"], item["cats"], item["tags"] = (
+                        target_name,
+                        cats,
+                        tags,
+                    )
+                    find_flag = True
+                    break
+            if not find_flag:
+                raise NoSuchSubscribeException()
+            self.user_target.update({"subs": subs}, query)
+        else:
+            raise NoSuchUserException()
+        self.update_send_cache()
+
+    def update_send_cache(self):
+        """Rebuild the target->users / category / tag caches from the DB,
+        deleting subscriptions whose platform is no longer supported."""
+        res = {target_type: defaultdict(list) for target_type in supported_target_type}
+        cat_res = {target_type: defaultdict(lambda: defaultdict(list)) for target_type in supported_target_type}
+        tag_res = {target_type: defaultdict(lambda: defaultdict(list)) for target_type in supported_target_type}
+        # res = {target_type: defaultdict(lambda: defaultdict(list)) for target_type in supported_target_type}
+        to_del = []
+        for user in self.user_target.all():
+            for sub in user.get("subs", []):
+                if sub.get("target_type") not in supported_target_type:
+                    # Collect now, delete after the scan (del_subscribe mutates).
+                    to_del.append(
+                        {
+                            "user": user["user"],
+                            "user_type": user["user_type"],
+                            "target": sub["target"],
+                            "target_type": sub["target_type"],
+                        }
+                    )
+                    continue
+                res[sub["target_type"]][sub["target"]].append(User(user["user"], user["user_type"]))
+                cat_res[sub["target_type"]][sub["target"]]["{}-{}".format(user["user_type"], user["user"])] = sub[
+                    "cats"
+                ]
+                tag_res[sub["target_type"]][sub["target"]]["{}-{}".format(user["user_type"], user["user"])] = sub[
+                    "tags"
+                ]
+        self.target_user_cache = res
+        self.target_user_cat_cache = cat_res
+        self.target_user_tag_cache = tag_res
+        for target_type in self.target_user_cache:
+            self.target_list[target_type] = list(self.target_user_cache[target_type].keys())
+
+        logger.info(f"Deleting {to_del}")
+        for d in to_del:
+            # NOTE: each call re-runs update_send_cache; acceptable for the
+            # small legacy data set this code still serves.
+            self.del_subscribe(**d)
+
+    def get_sub_category(self, target_type, target, user_type, user):
+        """Cached category filter for one (target, user) pair."""
+        return self.target_user_cat_cache[target_type][target][f"{user_type}-{user}"]
+
+    def get_sub_tags(self, target_type, target, user_type, user):
+        """Cached tag filter for one (target, user) pair."""
+        return self.target_user_tag_cache[target_type][target][f"{user_type}-{user}"]
+
+    def get_next_target(self, target_type):
+        """Round-robin over the platform's targets; None if there are none."""
+        # FIXME 插入或删除target后对队列的影响(但是并不是大问题
+        if not self.target_list[target_type]:
+            return None
+        self.next_index[target_type] %= len(self.target_list[target_type])
+        res = self.target_list[target_type][self.next_index[target_type]]
+        self.next_index[target_type] += 1
+        return res
+
+
+def start_up():
+    """Initialise the legacy store: stamp the schema version on first run,
+    or migrate v1 records (adding empty cats/tags) up to migrate_version."""
+    config = Config()
+    if not config.available:
+        return
+    if not (search_res := config.kv_config.search(Query().name == "version")):
+        # Fresh file: just record the current version.
+        config.kv_config.insert({"name": "version", "value": config.migrate_version})
+    elif search_res[0].get("value") < config.migrate_version:  # type: ignore
+        query = Query()
+        version_query = query.name == "version"
+        cur_version = search_res[0].get("value")
+        if cur_version == 1:
+            # v1 -> v2: subscriptions gain (empty) category and tag filters.
+            cur_version = 2
+            for user_conf in config.user_target.all():
+                conf_id = user_conf.doc_id
+                subs = user_conf["subs"]
+                for sub in subs:
+                    sub["cats"] = []
+                    sub["tags"] = []
+                config.user_target.update({"subs": subs}, doc_ids=[conf_id])
+        config.kv_config.update({"value": config.migrate_version}, version_query)
+        # do migration
+    config.update_send_cache()
diff --git a/src/plugins/nonebot_bison/config/db_config.py b/src/plugins/nonebot_bison/config/db_config.py
new file mode 100644
index 00000000..157b1ef6
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/db_config.py
@@ -0,0 +1,263 @@
+import asyncio
+from collections import defaultdict
+from datetime import time, datetime
+from collections.abc import Callable, Sequence, Awaitable
+
+from nonebot.compat import model_dump
+from sqlalchemy.orm import selectinload
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy import func, delete, select
+from nonebot_plugin_saa import PlatformTarget
+from nonebot_plugin_datastore import create_session
+
+from ..types import Tag
+from ..types import Target as T_Target
+from .utils import NoSuchTargetException
+from .db_model import User, Target, Subscribe, ScheduleTimeWeight
+from ..types import Category, UserSubInfo, WeightConfig, TimeWeightConfig, PlatformWeightConfigResp
+
+
+def _get_time():
+ dt = datetime.now()
+ cur_time = time(hour=dt.hour, minute=dt.minute, second=dt.second)
+ return cur_time
+
+
+# Raised when an insert hits the (target_id, user_id) unique constraint.
+class SubscribeDupException(Exception): ...
+
+
+class DBConfig:
+ def __init__(self):
+ self.add_target_hook: list[Callable[[str, T_Target], Awaitable]] = []
+ self.delete_target_hook: list[Callable[[str, T_Target], Awaitable]] = []
+
+ def register_add_target_hook(self, fun: Callable[[str, T_Target], Awaitable]):
+ self.add_target_hook.append(fun)
+
+ def register_delete_target_hook(self, fun: Callable[[str, T_Target], Awaitable]):
+ self.delete_target_hook.append(fun)
+
+ async def add_subscribe(
+ self,
+ user: PlatformTarget,
+ target: T_Target,
+ target_name: str,
+ platform_name: str,
+ cats: list[Category],
+ tags: list[Tag],
+ ):
+ async with create_session() as session:
+ db_user_stmt = select(User).where(User.user_target == model_dump(user))
+ db_user: User | None = await session.scalar(db_user_stmt)
+ if not db_user:
+ db_user = User(user_target=model_dump(user))
+ session.add(db_user)
+ db_target_stmt = select(Target).where(Target.platform_name == platform_name).where(Target.target == target)
+ db_target: Target | None = await session.scalar(db_target_stmt)
+ if not db_target:
+ db_target = Target(target=target, platform_name=platform_name, target_name=target_name)
+ await asyncio.gather(*[hook(platform_name, target) for hook in self.add_target_hook])
+ else:
+ db_target.target_name = target_name
+ subscribe = Subscribe(
+ categories=cats,
+ tags=tags,
+ user=db_user,
+ target=db_target,
+ )
+ session.add(subscribe)
+ try:
+ await session.commit()
+ except IntegrityError as e:
+ if len(e.args) > 0 and "UNIQUE constraint failed" in e.args[0]:
+ raise SubscribeDupException()
+ raise e
+
+ async def list_subscribe(self, user: PlatformTarget) -> Sequence[Subscribe]:
+ async with create_session() as session:
+ query_stmt = (
+ select(Subscribe)
+ .where(User.user_target == model_dump(user))
+ .join(User)
+ .options(selectinload(Subscribe.target))
+ )
+ subs = (await session.scalars(query_stmt)).all()
+ return subs
+
+ async def list_subs_with_all_info(self) -> Sequence[Subscribe]:
+ """获取数据库中带有user、target信息的subscribe数据"""
+ async with create_session() as session:
+ query_stmt = (
+ select(Subscribe).join(User).options(selectinload(Subscribe.target), selectinload(Subscribe.user))
+ )
+ subs = (await session.scalars(query_stmt)).all()
+
+ return subs
+
+ async def del_subscribe(self, user: PlatformTarget, target: str, platform_name: str):
+ async with create_session() as session:
+ user_obj = await session.scalar(select(User).where(User.user_target == model_dump(user)))
+ target_obj = await session.scalar(
+ select(Target).where(Target.platform_name == platform_name, Target.target == target)
+ )
+ await session.execute(delete(Subscribe).where(Subscribe.user == user_obj, Subscribe.target == target_obj))
+ target_count = await session.scalar(
+ select(func.count()).select_from(Subscribe).where(Subscribe.target == target_obj)
+ )
+ if target_count == 0:
+ # delete empty target
+ await asyncio.gather(*[hook(platform_name, T_Target(target)) for hook in self.delete_target_hook])
+ await session.commit()
+
+ async def update_subscribe(
+ self,
+ user: PlatformTarget,
+ target: str,
+ target_name: str,
+ platform_name: str,
+ cats: list,
+ tags: list,
+ ):
+ async with create_session() as sess:
+ subscribe_obj: Subscribe = await sess.scalar(
+ select(Subscribe)
+ .where(
+ User.user_target == model_dump(user),
+ Target.target == target,
+ Target.platform_name == platform_name,
+ )
+ .join(User)
+ .join(Target)
+ .options(selectinload(Subscribe.target)) # type:ignore
+ )
+ subscribe_obj.tags = tags # type:ignore
+ subscribe_obj.categories = cats # type:ignore
+ subscribe_obj.target.target_name = target_name
+ await sess.commit()
+
+ async def get_platform_target(self, platform_name: str) -> Sequence[Target]:
+ async with create_session() as sess:
+ subq = select(Subscribe.target_id).distinct().subquery()
+ query = select(Target).join(subq).where(Target.platform_name == platform_name)
+ return (await sess.scalars(query)).all()
+
+ async def get_time_weight_config(self, target: T_Target, platform_name: str) -> WeightConfig:
+ async with create_session() as sess:
+ time_weight_conf = (
+ await sess.scalars(
+ select(ScheduleTimeWeight)
+ .where(Target.platform_name == platform_name, Target.target == target)
+ .join(Target)
+ )
+ ).all()
+ targetObj = await sess.scalar(
+ select(Target).where(Target.platform_name == platform_name, Target.target == target)
+ )
+ assert targetObj
+ return WeightConfig(
+ default=targetObj.default_schedule_weight,
+ time_config=[
+ TimeWeightConfig(
+ start_time=time_conf.start_time,
+ end_time=time_conf.end_time,
+ weight=time_conf.weight,
+ )
+ for time_conf in time_weight_conf
+ ],
+ )
+
+ async def update_time_weight_config(self, target: T_Target, platform_name: str, conf: WeightConfig):
+ async with create_session() as sess:
+ targetObj = await sess.scalar(
+ select(Target).where(Target.platform_name == platform_name, Target.target == target)
+ )
+ if not targetObj:
+ raise NoSuchTargetException()
+ target_id = targetObj.id
+ targetObj.default_schedule_weight = conf.default
+ delete_statement = delete(ScheduleTimeWeight).where(ScheduleTimeWeight.target_id == target_id)
+ await sess.execute(delete_statement)
+ for time_conf in conf.time_config:
+ new_conf = ScheduleTimeWeight(
+ start_time=time_conf.start_time,
+ end_time=time_conf.end_time,
+ weight=time_conf.weight,
+ target=targetObj,
+ )
+ sess.add(new_conf)
+
+ await sess.commit()
+
+ async def get_current_weight_val(self, platform_list: list[str]) -> dict[str, int]:
+ res = {}
+ cur_time = _get_time()
+ async with create_session() as sess:
+ targets = (
+ await sess.scalars(
+ select(Target)
+ .where(Target.platform_name.in_(platform_list))
+ .options(selectinload(Target.time_weight))
+ )
+ ).all()
+ for target in targets:
+ key = f"{target.platform_name}-{target.target}"
+ weight = target.default_schedule_weight
+ for time_conf in target.time_weight:
+ if time_conf.start_time <= cur_time and time_conf.end_time > cur_time:
+ weight = time_conf.weight
+ break
+ res[key] = weight
+ return res
+
+ async def get_platform_target_subscribers(self, platform_name: str, target: T_Target) -> list[UserSubInfo]:
+ async with create_session() as sess:
+ query = (
+ select(Subscribe)
+ .join(Target)
+ .where(Target.platform_name == platform_name, Target.target == target)
+ .options(selectinload(Subscribe.user))
+ )
+ subsribes = (await sess.scalars(query)).all()
+ return [
+ UserSubInfo(
+ PlatformTarget.deserialize(subscribe.user.user_target),
+ subscribe.categories,
+ subscribe.tags,
+ )
+ for subscribe in subsribes
+ ]
+
+ async def get_all_weight_config(
+ self,
+ ) -> dict[str, dict[str, PlatformWeightConfigResp]]:
+ res: dict[str, dict[str, PlatformWeightConfigResp]] = defaultdict(dict)
+ async with create_session() as sess:
+ query = select(Target)
+ targets = (await sess.scalars(query)).all()
+ query = select(ScheduleTimeWeight).options(selectinload(ScheduleTimeWeight.target))
+ time_weights = (await sess.scalars(query)).all()
+
+ for target in targets:
+ platform_name = target.platform_name
+ if platform_name not in res.keys():
+ res[platform_name][target.target] = PlatformWeightConfigResp(
+ target=T_Target(target.target),
+ target_name=target.target_name,
+ platform_name=platform_name,
+ weight=WeightConfig(default=target.default_schedule_weight, time_config=[]),
+ )
+
+ for time_weight_config in time_weights:
+ platform_name = time_weight_config.target.platform_name
+ target = time_weight_config.target.target
+ res[platform_name][target].weight.time_config.append(
+ TimeWeightConfig(
+ start_time=time_weight_config.start_time,
+ end_time=time_weight_config.end_time,
+ weight=time_weight_config.weight,
+ )
+ )
+ return res
+
+
+# Module-level singleton, re-exported by config/__init__.py.
+config = DBConfig()
diff --git a/src/plugins/nonebot_bison/config/db_migration.py b/src/plugins/nonebot_bison/config/db_migration.py
new file mode 100644
index 00000000..75080ad6
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/db_migration.py
@@ -0,0 +1,71 @@
+from nonebot.log import logger
+from nonebot.compat import model_dump
+from nonebot_plugin_datastore.db import get_engine
+from sqlalchemy.ext.asyncio.session import AsyncSession
+from nonebot_plugin_saa import TargetQQGroup, TargetQQPrivate
+
+from .db_model import User, Target, Subscribe
+from .config_legacy import Config, ConfigContent, drop
+
+
+async def data_migrate():
+    """One-shot migration from the legacy TinyDB store into the SQL database.
+
+    Converts each legacy (user, user_type) record into a User row with a SAA
+    serialized target, de-duplicates targets across users, skips duplicate
+    subscriptions within one user, then marks the legacy store deprecated.
+    """
+    config = Config()
+    if config.available:
+        logger.warning("You are still using legacy db, migrating to sqlite")
+        all_subs: list[ConfigContent] = [ConfigContent(**item) for item in config.get_all_subscribe().all()]
+        async with AsyncSession(get_engine()) as sess:
+            user_to_create = []
+            subscribe_to_create = []
+            # key "target-platform" -> (Target row, first subscriber's type, id)
+            platform_target_map: dict[str, tuple[Target, str, int]] = {}
+            for user in all_subs:
+                # Legacy store only knew QQ groups and private chats.
+                if user["user_type"] == "group":
+                    user_target = TargetQQGroup(group_id=user["user"])
+                else:
+                    user_target = TargetQQPrivate(user_id=user["user"])
+                db_user = User(user_target=model_dump(user_target))
+                user_to_create.append(db_user)
+                user_sub_set = set()
+                for sub in user["subs"]:
+                    target = sub["target"]
+                    platform_name = sub["target_type"]
+                    target_name = sub["target_name"]
+                    key = f"{target}-{platform_name}"
+                    if key in user_sub_set:
+                        # a user subscribe a target twice
+                        logger.error(
+                            f"用户 {user['user_type']}-{user['user']} 订阅了 {platform_name}-{target_name} 两次,"
+                            "随机采用了一个订阅",
+                        )
+                        continue
+                    user_sub_set.add(key)
+                    if key in platform_target_map.keys():
+                        target_obj, ext_user_type, ext_user = platform_target_map[key]
+                        if target_obj.target_name != target_name:
+                            # GG
+                            # Conflicting names for the same target: keep the
+                            # first one and ask the user to re-add after migration.
+                            logger.error(
+                                f"你的旧版本数据库中存在数据不一致问题,请完成迁移后执行重新添加{platform_name}平台的{target}"
+                                f"它的名字可能为{target_obj.target_name}或{target_name}"
+                            )
+
+                    else:
+                        target_obj = Target(
+                            platform_name=platform_name,
+                            target_name=target_name,
+                            target=target,
+                        )
+                        platform_target_map[key] = (
+                            target_obj,
+                            user["user_type"],
+                            user["user"],
+                        )
+                    subscribe_obj = Subscribe(
+                        user=db_user,
+                        target=target_obj,
+                        categories=sub["cats"],
+                        tags=sub["tags"],
+                    )
+                    subscribe_to_create.append(subscribe_obj)
+            sess.add_all(user_to_create + [x[0] for x in platform_target_map.values()] + subscribe_to_create)
+            await sess.commit()
+        # Write the deprecation marker so this never runs twice.
+        drop()
+        logger.info("migrate success")
diff --git a/src/plugins/nonebot_bison/config/db_model.py b/src/plugins/nonebot_bison/config/db_model.py
new file mode 100644
index 00000000..849094d1
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/db_model.py
@@ -0,0 +1,68 @@
+import datetime
+from pathlib import Path
+
+from nonebot_plugin_saa import PlatformTarget
+from sqlalchemy.dialects.postgresql import JSONB
+from nonebot.compat import PYDANTIC_V2, ConfigDict
+from nonebot_plugin_datastore import get_plugin_data
+from sqlalchemy.orm import Mapped, relationship, mapped_column
+from sqlalchemy import JSON, String, ForeignKey, UniqueConstraint
+
+from ..types import Tag, Category
+
+# Declarative base provided by nonebot-plugin-datastore for this plugin.
+Model = get_plugin_data().Model
+# Alembic migration scripts live in the sibling "migrations" directory.
+get_plugin_data().set_migration_dir(Path(__file__).parent / "migrations")
+
+
+class User(Model):
+    """A subscriber, identified by a serialized SAA platform target."""
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    # Serialized PlatformTarget dict; JSONB on PostgreSQL, JSON elsewhere.
+    user_target: Mapped[dict] = mapped_column(JSON().with_variant(JSONB, "postgresql"))
+
+    subscribes: Mapped[list["Subscribe"]] = relationship(back_populates="user")
+
+    @property
+    def saa_target(self) -> PlatformTarget:
+        """Deserialize user_target back into a SAA PlatformTarget."""
+        return PlatformTarget.deserialize(self.user_target)
+
+
+class Target(Model):
+    """A followed account/feed on one platform; unique per (target, platform)."""
+
+    __table_args__ = (UniqueConstraint("target", "platform_name", name="unique-target-constraint"),)
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    platform_name: Mapped[str] = mapped_column(String(20))
+    target: Mapped[str] = mapped_column(String(1024))
+    target_name: Mapped[str] = mapped_column(String(1024))
+    # Scheduler weight used when no ScheduleTimeWeight window matches.
+    default_schedule_weight: Mapped[int] = mapped_column(default=10)
+
+    subscribes: Mapped[list["Subscribe"]] = relationship(back_populates="target")
+    time_weight: Mapped[list["ScheduleTimeWeight"]] = relationship(back_populates="target")
+
+
+class ScheduleTimeWeight(Model):
+    """A time window overriding a target's default schedule weight."""
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    target_id: Mapped[int] = mapped_column(ForeignKey("nonebot_bison_target.id"))
+    start_time: Mapped[datetime.time]
+    end_time: Mapped[datetime.time]
+    weight: Mapped[int]
+
+    target: Mapped[Target] = relationship(back_populates="time_weight")
+
+    # Allow arbitrary (non-pydantic) field types such as datetime.time;
+    # spelled per installed pydantic major version.
+    if PYDANTIC_V2:
+        model_config = ConfigDict(arbitrary_types_allowed=True)
+    else:
+
+        class Config:
+            arbitrary_types_allowed = True
+
+
+class Subscribe(Model):
+    """Join row between User and Target with per-subscription filters."""
+
+    __table_args__ = (UniqueConstraint("target_id", "user_id", name="unique-subscribe-constraint"),)
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    target_id: Mapped[int] = mapped_column(ForeignKey("nonebot_bison_target.id"))
+    user_id: Mapped[int] = mapped_column(ForeignKey("nonebot_bison_user.id"))
+    # Category and tag filters stored as JSON arrays.
+    categories: Mapped[list[Category]] = mapped_column(JSON)
+    tags: Mapped[list[Tag]] = mapped_column(JSON)
+
+    target: Mapped[Target] = relationship(back_populates="subscribes")
+    user: Mapped[User] = relationship(back_populates="subscribes")
diff --git a/src/plugins/nonebot_bison/config/migrations/0571870f5222_init_db.py b/src/plugins/nonebot_bison/config/migrations/0571870f5222_init_db.py
new file mode 100644
index 00000000..391433f1
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/0571870f5222_init_db.py
@@ -0,0 +1,61 @@
+"""init db
+
+Revision ID: 0571870f5222
+Revises:
+Create Date: 2022-03-21 19:18:13.762626
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "0571870f5222"
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Initial schema: target / user / subscribe tables.
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "target",
+        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column("platform_name", sa.String(length=20), nullable=False),
+        sa.Column("target", sa.String(length=1024), nullable=False),
+        sa.Column("target_name", sa.String(length=1024), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "user",
+        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column("type", sa.String(length=20), nullable=False),
+        sa.Column("uid", sa.Integer(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "subscribe",
+        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column("target_id", sa.Integer(), nullable=True),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("categories", sa.String(length=1024), nullable=True),
+        sa.Column("tags", sa.String(length=1024), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["target_id"],
+            ["target.id"],
+        ),
+        sa.ForeignKeyConstraint(
+            ["user_id"],
+            ["user.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # Drop in dependency order (subscribe references user and target).
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table("subscribe")
+    op.drop_table("user")
+    op.drop_table("target")
+    # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/4a46ba54a3f3_alter_type.py b/src/plugins/nonebot_bison/config/migrations/4a46ba54a3f3_alter_type.py
new file mode 100644
index 00000000..4dbeefed
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/4a46ba54a3f3_alter_type.py
@@ -0,0 +1,56 @@
+"""alter type
+
+Revision ID: 4a46ba54a3f3
+Revises: c97c445e2bdb
+Create Date: 2022-03-27 21:50:10.911649
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "4a46ba54a3f3"
+down_revision = "c97c445e2bdb"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Convert categories/tags from VARCHAR to native JSON columns.
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("subscribe", schema=None) as batch_op:
+        batch_op.alter_column(
+            "categories",
+            existing_type=sa.VARCHAR(length=1024),
+            type_=sa.JSON(),
+            existing_nullable=True,
+            postgresql_using="categories::json",
+        )
+        batch_op.alter_column(
+            "tags",
+            existing_type=sa.VARCHAR(length=1024),
+            type_=sa.JSON(),
+            existing_nullable=True,
+            postgresql_using="tags::json",
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # Revert categories/tags back to VARCHAR(1024).
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("subscribe", schema=None) as batch_op:
+        batch_op.alter_column(
+            "tags",
+            existing_type=sa.JSON(),
+            type_=sa.VARCHAR(length=1024),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "categories",
+            existing_type=sa.JSON(),
+            type_=sa.VARCHAR(length=1024),
+            existing_nullable=True,
+        )
+
+    # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/5da28f6facb3_rename_tables.py b/src/plugins/nonebot_bison/config/migrations/5da28f6facb3_rename_tables.py
new file mode 100644
index 00000000..dd86893b
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/5da28f6facb3_rename_tables.py
@@ -0,0 +1,33 @@
+"""rename tables
+
+Revision ID: 5da28f6facb3
+Revises: 5f3370328e44
+Create Date: 2023-01-15 19:04:54.987491
+
+"""
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "5da28f6facb3"
+down_revision = "5f3370328e44"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # Prefix every table with "nonebot_bison_" (datastore naming convention).
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.rename_table("target", "nonebot_bison_target")
+    op.rename_table("user", "nonebot_bison_user")
+    op.rename_table("schedule_time_weight", "nonebot_bison_scheduletimeweight")
+    op.rename_table("subscribe", "nonebot_bison_subscribe")
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # Restore the original unprefixed table names.
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.rename_table("nonebot_bison_subscribe", "subscribe")
+    op.rename_table("nonebot_bison_scheduletimeweight", "schedule_time_weight")
+    op.rename_table("nonebot_bison_user", "user")
+    op.rename_table("nonebot_bison_target", "target")
+    # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/5f3370328e44_add_time_weight_table.py b/src/plugins/nonebot_bison/config/migrations/5f3370328e44_add_time_weight_table.py
new file mode 100644
index 00000000..696dfa71
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/5f3370328e44_add_time_weight_table.py
@@ -0,0 +1,48 @@
+"""add time-weight table
+
+Revision ID: 5f3370328e44
+Revises: a333d6224193
+Create Date: 2022-05-31 22:05:13.235981
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "5f3370328e44"
+down_revision = "a333d6224193"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Introduce per-time-window weights; replace last_schedule_time with
+    # a default_schedule_weight column on target.
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "schedule_time_weight",
+        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
+        sa.Column("target_id", sa.Integer(), nullable=True),
+        sa.Column("start_time", sa.Time(), nullable=True),
+        sa.Column("end_time", sa.Time(), nullable=True),
+        sa.Column("weight", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["target_id"],
+            ["target.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    with op.batch_alter_table("target", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("default_schedule_weight", sa.Integer(), nullable=True))
+        batch_op.drop_column("last_schedule_time")
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # Restore last_schedule_time and drop the time-weight machinery.
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("target", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("last_schedule_time", sa.DATETIME(), nullable=True))
+        batch_op.drop_column("default_schedule_weight")
+
+    op.drop_table("schedule_time_weight")
+    # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/632b8086bc2b_add_user_target.py b/src/plugins/nonebot_bison/config/migrations/632b8086bc2b_add_user_target.py
new file mode 100644
index 00000000..a6f5e3a5
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/632b8086bc2b_add_user_target.py
@@ -0,0 +1,41 @@
+"""add user_target
+
+Revision ID: 632b8086bc2b
+Revises: aceef470d69c
+Create Date: 2023-03-20 00:39:30.199915
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects.postgresql import JSONB
+
+# revision identifiers, used by Alembic.
+revision = "632b8086bc2b"
+down_revision = "aceef470d69c"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # Add the serialized SAA user_target JSON column (nullable during backfill).
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("nonebot_bison_user", schema=None) as batch_op:
+        batch_op.drop_constraint("unique-user-constraint", type_="unique")
+        batch_op.add_column(
+            sa.Column(
+                "user_target",
+                sa.JSON().with_variant(JSONB, "postgresql"),
+                nullable=True,
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # Drop user_target and restore the (type, uid) uniqueness.
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("nonebot_bison_user", schema=None) as batch_op:
+        batch_op.drop_column("user_target")
+        batch_op.create_unique_constraint("unique-user-constraint", ["type", "uid"])
+
+    # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/67c38b3f39c2_make_user_target_not_nullable.py b/src/plugins/nonebot_bison/config/migrations/67c38b3f39c2_make_user_target_not_nullable.py
new file mode 100644
index 00000000..1f3e07a7
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/67c38b3f39c2_make_user_target_not_nullable.py
@@ -0,0 +1,45 @@
+"""make user_target not nullable
+
+Revision ID: 67c38b3f39c2
+Revises: a5466912fad0
+Create Date: 2023-03-20 11:08:42.883556
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects.postgresql import JSONB
+
+# revision identifiers, used by Alembic.
+revision = "67c38b3f39c2"
+down_revision = "a5466912fad0"
+branch_labels = None
+depends_on = None
+
+
+def jsonb_if_postgresql_else_json():
+    """JSON column type matching db_model: JSONB on PostgreSQL, JSON elsewhere."""
+    return sa.JSON().with_variant(JSONB, "postgresql")
+
+
+def upgrade() -> None:
+    # Backfill finished in a5466912fad0; user_target can now be NOT NULL.
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("nonebot_bison_user", schema=None) as batch_op:
+        batch_op.alter_column(
+            "user_target",
+            existing_type=jsonb_if_postgresql_else_json(),
+            nullable=False,
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # Make user_target nullable again.
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("nonebot_bison_user", schema=None) as batch_op:
+        batch_op.alter_column(
+            "user_target",
+            existing_type=jsonb_if_postgresql_else_json(),
+            nullable=True,
+        )
+
+    # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/8d3863e9d74b_remove_uid_and_type.py b/src/plugins/nonebot_bison/config/migrations/8d3863e9d74b_remove_uid_and_type.py
new file mode 100644
index 00000000..649e7f66
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/8d3863e9d74b_remove_uid_and_type.py
@@ -0,0 +1,34 @@
+"""remove uid and type
+
+Revision ID: 8d3863e9d74b
+Revises: 67c38b3f39c2
+Create Date: 2023-03-20 15:38:20.220599
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "8d3863e9d74b"
+down_revision = "67c38b3f39c2"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("nonebot_bison_user", schema=None) as batch_op:
+ batch_op.drop_column("uid")
+ batch_op.drop_column("type")
+
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("nonebot_bison_user", schema=None) as batch_op:
+ batch_op.add_column(sa.Column("type", sa.VARCHAR(length=20), nullable=False))
+ batch_op.add_column(sa.Column("uid", sa.INTEGER(), nullable=False))
+
+ # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/a333d6224193_add_last_scheduled_time.py b/src/plugins/nonebot_bison/config/migrations/a333d6224193_add_last_scheduled_time.py
new file mode 100644
index 00000000..ad0892bc
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/a333d6224193_add_last_scheduled_time.py
@@ -0,0 +1,32 @@
+"""add last scheduled time
+
+Revision ID: a333d6224193
+Revises: 4a46ba54a3f3
+Create Date: 2022-03-29 21:01:38.213153
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "a333d6224193"
+down_revision = "4a46ba54a3f3"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("target", schema=None) as batch_op:
+ batch_op.add_column(sa.Column("last_schedule_time", sa.DateTime(timezone=True), nullable=True))
+
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("target", schema=None) as batch_op:
+ batch_op.drop_column("last_schedule_time")
+
+ # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/a5466912fad0_map_user.py b/src/plugins/nonebot_bison/config/migrations/a5466912fad0_map_user.py
new file mode 100644
index 00000000..c89098f8
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/a5466912fad0_map_user.py
@@ -0,0 +1,52 @@
+"""map user
+
+Revision ID: a5466912fad0
+Revises: 632b8086bc2b
+Create Date: 2023-03-20 01:14:42.623789
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.orm import Session
+from sqlalchemy.ext.automap import automap_base
+
+# revision identifiers, used by Alembic.
+revision = "a5466912fad0"
+down_revision = "632b8086bc2b"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ Base = automap_base()
+ Base.prepare(op.get_bind())
+ User = Base.classes.nonebot_bison_user
+ with Session(op.get_bind()) as sess:
+ users = sess.scalars(sa.select(User)).all()
+ for user in users:
+ if user.type == "group":
+ user.user_target = {"platform_type": "QQ Group", "group_id": user.uid}
+ elif user.type == "private":
+ user.user_target = {"platform_type": "QQ Private", "user_id": user.uid}
+ else:
+ sess.delete(user)
+ sess.add_all(users)
+ sess.commit()
+
+
+def downgrade() -> None:
+ Base = automap_base()
+ Base.prepare(op.get_bind())
+ User = Base.classes.nonebot_bison_user
+ with Session(op.get_bind()) as sess:
+ users = sess.scalars(sa.select(User)).all()
+ for user in users:
+ if user.user_target["platform_type"] == "QQ Group":
+ user.uid = user.user_target["group_id"]
+ user.type = "group"
+ else:
+ user.uid = user.user_target["user_id"]
+ user.type = "private"
+ sess.add_all(users)
+ sess.commit()
diff --git a/src/plugins/nonebot_bison/config/migrations/aceef470d69c_alter_fields_not_null.py b/src/plugins/nonebot_bison/config/migrations/aceef470d69c_alter_fields_not_null.py
new file mode 100644
index 00000000..c51a400e
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/aceef470d69c_alter_fields_not_null.py
@@ -0,0 +1,52 @@
+"""alter fields not null
+
+Revision ID: aceef470d69c
+Revises: bd92923c218f
+Create Date: 2023-03-09 19:10:42.168133
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "aceef470d69c"
+down_revision = "bd92923c218f"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("nonebot_bison_scheduletimeweight", schema=None) as batch_op:
+ batch_op.alter_column("target_id", existing_type=sa.INTEGER(), nullable=False)
+ batch_op.alter_column("start_time", existing_type=sa.TIME(), nullable=False)
+ batch_op.alter_column("end_time", existing_type=sa.TIME(), nullable=False)
+ batch_op.alter_column("weight", existing_type=sa.INTEGER(), nullable=False)
+
+ with op.batch_alter_table("nonebot_bison_subscribe", schema=None) as batch_op:
+ batch_op.alter_column("target_id", existing_type=sa.INTEGER(), nullable=False)
+ batch_op.alter_column("user_id", existing_type=sa.INTEGER(), nullable=False)
+
+ with op.batch_alter_table("nonebot_bison_target", schema=None) as batch_op:
+ batch_op.alter_column("default_schedule_weight", existing_type=sa.INTEGER(), nullable=False)
+
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("nonebot_bison_target", schema=None) as batch_op:
+ batch_op.alter_column("default_schedule_weight", existing_type=sa.INTEGER(), nullable=True)
+
+ with op.batch_alter_table("nonebot_bison_subscribe", schema=None) as batch_op:
+ batch_op.alter_column("user_id", existing_type=sa.INTEGER(), nullable=True)
+ batch_op.alter_column("target_id", existing_type=sa.INTEGER(), nullable=True)
+
+ with op.batch_alter_table("nonebot_bison_scheduletimeweight", schema=None) as batch_op:
+ batch_op.alter_column("weight", existing_type=sa.INTEGER(), nullable=True)
+ batch_op.alter_column("end_time", existing_type=sa.TIME(), nullable=True)
+ batch_op.alter_column("start_time", existing_type=sa.TIME(), nullable=True)
+ batch_op.alter_column("target_id", existing_type=sa.INTEGER(), nullable=True)
+
+ # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/bd92923c218f_alter_json_not_null.py b/src/plugins/nonebot_bison/config/migrations/bd92923c218f_alter_json_not_null.py
new file mode 100644
index 00000000..aa3f2ff9
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/bd92923c218f_alter_json_not_null.py
@@ -0,0 +1,53 @@
+"""alter_json_not_null
+
+Revision ID: bd92923c218f
+Revises: 5da28f6facb3
+Create Date: 2023-03-02 14:04:16.492133
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy import select
+from sqlalchemy.orm import Session
+from sqlalchemy.ext.automap import automap_base
+
+# revision identifiers, used by Alembic.
+revision = "bd92923c218f"
+down_revision = "5da28f6facb3"
+branch_labels = None
+depends_on = None
+
+
+def set_default_value():
+ Base = automap_base()
+ Base.prepare(autoload_with=op.get_bind())
+ Subscribe = Base.classes.nonebot_bison_subscribe
+ with Session(op.get_bind()) as session:
+ select_statement = select(Subscribe)
+ results = session.scalars(select_statement)
+ for subscribe in results:
+ if subscribe.categories is None:
+ subscribe.categories = []
+ if subscribe.tags is None:
+ subscribe.tags = []
+ session.commit()
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ set_default_value()
+ with op.batch_alter_table("nonebot_bison_subscribe", schema=None) as batch_op:
+ batch_op.alter_column("categories", existing_type=sa.JSON(), nullable=False)
+ batch_op.alter_column("tags", existing_type=sa.JSON(), nullable=False)
+
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("nonebot_bison_subscribe", schema=None) as batch_op:
+ batch_op.alter_column("tags", existing_type=sa.JSON(), nullable=True)
+ batch_op.alter_column("categories", existing_type=sa.JSON(), nullable=True)
+
+ # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/c97c445e2bdb_add_constraint.py b/src/plugins/nonebot_bison/config/migrations/c97c445e2bdb_add_constraint.py
new file mode 100644
index 00000000..0388316e
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/c97c445e2bdb_add_constraint.py
@@ -0,0 +1,43 @@
+"""add constraint
+
+Revision ID: c97c445e2bdb
+Revises: 0571870f5222
+Create Date: 2022-03-26 19:46:50.910721
+
+"""
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "c97c445e2bdb"
+down_revision = "0571870f5222"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("subscribe", schema=None) as batch_op:
+ batch_op.create_unique_constraint("unique-subscribe-constraint", ["target_id", "user_id"])
+
+ with op.batch_alter_table("target", schema=None) as batch_op:
+ batch_op.create_unique_constraint("unique-target-constraint", ["target", "platform_name"])
+
+ with op.batch_alter_table("user", schema=None) as batch_op:
+ batch_op.create_unique_constraint("unique-user-constraint", ["type", "uid"])
+
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("user", schema=None) as batch_op:
+ batch_op.drop_constraint("unique-user-constraint", type_="unique")
+
+ with op.batch_alter_table("target", schema=None) as batch_op:
+ batch_op.drop_constraint("unique-target-constraint", type_="unique")
+
+ with op.batch_alter_table("subscribe", schema=None) as batch_op:
+ batch_op.drop_constraint("unique-subscribe-constraint", type_="unique")
+
+ # ### end Alembic commands ###
diff --git a/src/plugins/nonebot_bison/config/migrations/f9baef347cc8_remove_old_target.py b/src/plugins/nonebot_bison/config/migrations/f9baef347cc8_remove_old_target.py
new file mode 100644
index 00000000..fbed082d
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/migrations/f9baef347cc8_remove_old_target.py
@@ -0,0 +1,34 @@
+"""remove_old_target
+
+Revision ID: f9baef347cc8
+Revises: 8d3863e9d74b
+Create Date: 2023-08-25 00:20:51.511329
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.orm import Session
+from sqlalchemy.ext.automap import automap_base
+
+# revision identifiers, used by Alembic.
+revision = "f9baef347cc8"
+down_revision = "8d3863e9d74b"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ Base = automap_base()
+ Base.prepare(op.get_bind())
+ User = Base.classes.nonebot_bison_user
+ with Session(op.get_bind()) as sess:
+ users = sess.scalars(sa.select(User)).all()
+ for user in users:
+ if user.user_target["platform_type"] == "Unknow Onebot 12 Platform":
+ sess.delete(user)
+ sess.commit()
+
+
+def downgrade() -> None:
+ pass
diff --git a/src/plugins/nonebot_bison/config/subs_io/__init__.py b/src/plugins/nonebot_bison/config/subs_io/__init__.py
new file mode 100644
index 00000000..55ab0170
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/subs_io/__init__.py
@@ -0,0 +1,3 @@
+from .subs_io import subscribes_export, subscribes_import
+
+__all__ = ["subscribes_export", "subscribes_import"]
diff --git a/src/plugins/nonebot_bison/config/subs_io/nbesf_model/__init__.py b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/__init__.py
new file mode 100644
index 00000000..8dae14f7
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/__init__.py
@@ -0,0 +1,6 @@
+"""nbesf is Nonebot Bison Enchangable Subscribes File!"""
+
+from . import v1, v2
+from .base import NBESFBase
+
+__all__ = ["v1", "v2", "NBESFBase"]
diff --git a/src/plugins/nonebot_bison/config/subs_io/nbesf_model/base.py b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/base.py
new file mode 100644
index 00000000..426c8199
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/base.py
@@ -0,0 +1,35 @@
+from abc import ABC
+
+from pydantic import BaseModel
+from nonebot.compat import PYDANTIC_V2, ConfigDict
+from nonebot_plugin_saa.registries import AllSupportedPlatformTarget as UserInfo
+
+from ....types import Tag, Category
+
+
+class NBESFBase(BaseModel, ABC):
+ version: int # 表示nbesf格式版本,有效版本从1开始
+ groups: list = []
+
+ if PYDANTIC_V2:
+ model_config = ConfigDict(from_attributes=True)
+ else:
+
+ class Config:
+ orm_mode = True
+
+
+class SubReceipt(BaseModel):
+ """
+ 快递包中每件货物的收据
+
+ 导入订阅时的Model
+ """
+
+ user: UserInfo
+ target: str
+ target_name: str
+ platform_name: str
+ cats: list[Category]
+ tags: list[Tag]
+ # default_schedule_weight: int
diff --git a/src/plugins/nonebot_bison/config/subs_io/nbesf_model/v1.py b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/v1.py
new file mode 100644
index 00000000..324edf36
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/v1.py
@@ -0,0 +1,130 @@
+"""nbesf is Nonebot Bison Enchangable Subscribes File! ver.1"""
+
+from typing import Any
+from functools import partial
+
+from nonebot.log import logger
+from pydantic import BaseModel
+from nonebot_plugin_saa import TargetQQGroup, TargetQQPrivate
+from nonebot.compat import PYDANTIC_V2, ConfigDict, model_dump, type_validate_json, type_validate_python
+
+from ..utils import NBESFParseErr
+from ....types import Tag, Category
+from .base import NBESFBase, SubReceipt
+from ...db_config import SubscribeDupException, config
+
+# ===== nbesf 定义格式 ====== #
+NBESF_VERSION = 1
+
+
+class UserHead(BaseModel):
+ """Bison快递包收货信息"""
+
+ type: str
+ uid: int
+
+ if PYDANTIC_V2:
+ model_config = ConfigDict(from_attributes=True)
+ else:
+
+ class Config:
+ orm_mode = True
+
+
+class Target(BaseModel):
+ """Bsion快递包发货信息"""
+
+ target_name: str
+ target: str
+ platform_name: str
+ default_schedule_weight: int
+
+ if PYDANTIC_V2:
+ model_config = ConfigDict(from_attributes=True)
+ else:
+
+ class Config:
+ orm_mode = True
+
+
+class SubPayload(BaseModel):
+ """Bison快递包里的单件货物"""
+
+ categories: list[Category]
+ tags: list[Tag]
+ target: Target
+
+ if PYDANTIC_V2:
+ model_config = ConfigDict(from_attributes=True)
+ else:
+
+ class Config:
+ orm_mode = True
+
+
+class SubPack(BaseModel):
+ """Bison给指定用户派送的快递包"""
+
+ user: UserHead
+ subs: list[SubPayload]
+
+
+class SubGroup(
+ NBESFBase,
+):
+ """
+ Bison的全部订单(按用户分组)
+
+ 结构参见`nbesf_model`下的对应版本
+ """
+
+ version: int = NBESF_VERSION
+ groups: list[SubPack]
+
+
+# ======================= #
+
+
+async def subs_receipt_gen(nbesf_data: SubGroup):
+ for item in nbesf_data.groups:
+ match item.user.type:
+ case "group":
+ user = TargetQQGroup(group_id=item.user.uid)
+ case "private":
+ user = TargetQQPrivate(user_id=item.user.uid)
+ case _:
+ raise NotImplementedError(f"nbesf v1 不支持的用户类型:{item.user.type}")
+
+ sub_receipt = partial(SubReceipt, user=user)
+
+ for sub in item.subs:
+ receipt = sub_receipt(
+ target=sub.target.target,
+ target_name=sub.target.target_name,
+ platform_name=sub.target.platform_name,
+ cats=sub.categories,
+ tags=sub.tags,
+ )
+ try:
+ await config.add_subscribe(receipt.user, **model_dump(receipt, exclude={"user"}))
+ except SubscribeDupException:
+ logger.warning(f"!添加订阅条目 {repr(receipt)} 失败: 相同的订阅已存在")
+ except Exception as e:
+ logger.error(f"!添加订阅条目 {repr(receipt)} 失败: {repr(e)}")
+ else:
+ logger.success(f"添加订阅条目 {repr(receipt)} 成功!")
+
+
+def nbesf_parser(raw_data: Any) -> SubGroup:
+ try:
+ if isinstance(raw_data, str):
+ nbesf_data = type_validate_json(SubGroup, raw_data)
+ else:
+ nbesf_data = type_validate_python(SubGroup, raw_data)
+
+ except Exception as e:
+ logger.error("数据解析失败,该数据格式可能不满足NBESF格式标准!")
+ raise NBESFParseErr("数据解析失败") from e
+ else:
+ logger.success("NBESF文件解析成功.")
+ return nbesf_data
diff --git a/src/plugins/nonebot_bison/config/subs_io/nbesf_model/v2.py b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/v2.py
new file mode 100644
index 00000000..7b2a1884
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/subs_io/nbesf_model/v2.py
@@ -0,0 +1,106 @@
+"""nbesf is Nonebot Bison Enchangable Subscribes File! ver.2"""
+
+from typing import Any
+from functools import partial
+
+from nonebot.log import logger
+from pydantic import BaseModel
+from nonebot_plugin_saa.registries import AllSupportedPlatformTarget
+from nonebot.compat import PYDANTIC_V2, ConfigDict, model_dump, type_validate_json, type_validate_python
+
+from ..utils import NBESFParseErr
+from ....types import Tag, Category
+from .base import NBESFBase, SubReceipt
+from ...db_config import SubscribeDupException, config
+
+# ===== nbesf 定义格式 ====== #
+NBESF_VERSION = 2
+
+
+class Target(BaseModel):
+ """Bsion快递包发货信息"""
+
+ target_name: str
+ target: str
+ platform_name: str
+ default_schedule_weight: int
+
+ if PYDANTIC_V2:
+ model_config = ConfigDict(from_attributes=True)
+ else:
+
+ class Config:
+ orm_mode = True
+
+
+class SubPayload(BaseModel):
+ """Bison快递包里的单件货物"""
+
+ categories: list[Category]
+ tags: list[Tag]
+ target: Target
+
+ if PYDANTIC_V2:
+ model_config = ConfigDict(from_attributes=True)
+ else:
+
+ class Config:
+ orm_mode = True
+
+
+class SubPack(BaseModel):
+ """Bison给指定用户派送的快递包"""
+
+ # user_target: Bison快递包收货信息
+ user_target: AllSupportedPlatformTarget
+ subs: list[SubPayload]
+
+
+class SubGroup(NBESFBase):
+ """
+ Bison的全部订单(按用户分组)
+
+ 结构参见`nbesf_model`下的对应版本
+ """
+
+ version: int = NBESF_VERSION
+ groups: list[SubPack]
+
+
+# ======================= #
+
+
+async def subs_receipt_gen(nbesf_data: SubGroup):
+ for item in nbesf_data.groups:
+ sub_receipt = partial(SubReceipt, user=item.user_target)
+
+ for sub in item.subs:
+ receipt = sub_receipt(
+ target=sub.target.target,
+ target_name=sub.target.target_name,
+ platform_name=sub.target.platform_name,
+ cats=sub.categories,
+ tags=sub.tags,
+ )
+ try:
+ await config.add_subscribe(receipt.user, **model_dump(receipt, exclude={"user"}))
+ except SubscribeDupException:
+ logger.warning(f"!添加订阅条目 {repr(receipt)} 失败: 相同的订阅已存在")
+ except Exception as e:
+ logger.error(f"!添加订阅条目 {repr(receipt)} 失败: {repr(e)}")
+ else:
+ logger.success(f"添加订阅条目 {repr(receipt)} 成功!")
+
+
+def nbesf_parser(raw_data: Any) -> SubGroup:
+ try:
+ if isinstance(raw_data, str):
+ nbesf_data = type_validate_json(SubGroup, raw_data)
+ else:
+ nbesf_data = type_validate_python(SubGroup, raw_data)
+
+ except Exception as e:
+ logger.error("数据解析失败,该数据格式可能不满足NBESF格式标准!")
+ raise NBESFParseErr("数据解析失败") from e
+ else:
+ return nbesf_data
diff --git a/src/plugins/nonebot_bison/config/subs_io/subs_io.py b/src/plugins/nonebot_bison/config/subs_io/subs_io.py
new file mode 100644
index 00000000..ec826957
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/subs_io/subs_io.py
@@ -0,0 +1,77 @@
+from typing import cast
+from collections import defaultdict
+from collections.abc import Callable
+
+from sqlalchemy import select
+from nonebot.log import logger
+from sqlalchemy.sql.selectable import Select
+from nonebot_plugin_saa import PlatformTarget
+from nonebot.compat import type_validate_python
+from nonebot_plugin_datastore.db import create_session
+from sqlalchemy.orm.strategy_options import selectinload
+
+from .utils import NBESFVerMatchErr
+from ..db_model import User, Subscribe
+from .nbesf_model import NBESFBase, v1, v2
+
+
+async def subscribes_export(selector: Callable[[Select], Select]) -> v2.SubGroup:
+ """
+ 将Bison订阅导出为 Nonebot Bison Exchangable Subscribes File 标准格式的 SubGroup 类型数据
+
+ selector:
+ 对 sqlalchemy Select 对象的操作函数,用于限定查询范围
+ e.g. lambda stmt: stmt.where(User.uid=2233, User.type="group")
+ """
+ async with create_session() as sess:
+ sub_stmt = select(Subscribe).join(User)
+ sub_stmt = selector(sub_stmt).options(selectinload(Subscribe.target))
+ sub_stmt = cast(Select[tuple[Subscribe]], sub_stmt)
+ sub_data = await sess.scalars(sub_stmt)
+
+ user_stmt = select(User).join(Subscribe)
+ user_stmt = selector(user_stmt).distinct()
+ user_stmt = cast(Select[tuple[User]], user_stmt)
+ user_data = await sess.scalars(user_stmt)
+
+ groups: list[v2.SubPack] = []
+ user_id_sub_dict: dict[int, list[v2.SubPayload]] = defaultdict(list)
+
+ for sub in sub_data:
+ sub_paylaod = type_validate_python(v2.SubPayload, sub)
+ user_id_sub_dict[sub.user_id].append(sub_paylaod)
+
+ for user in user_data:
+ assert isinstance(user, User)
+ sub_pack = v2.SubPack(
+ user_target=PlatformTarget.deserialize(user.user_target),
+ subs=user_id_sub_dict[user.id],
+ )
+ groups.append(sub_pack)
+
+ sub_group = v2.SubGroup(groups=groups)
+
+ return sub_group
+
+
+async def subscribes_import(
+ nbesf_data: NBESFBase,
+):
+ """
+ 从 Nonebot Bison Exchangable Subscribes File 标准格式的数据中导入订阅
+
+ nbesf_data:
+ 符合nbesf_model标准的 SubGroup 类型数据
+ """
+
+ logger.info("开始添加订阅流程")
+ match nbesf_data.version:
+ case 1:
+ assert isinstance(nbesf_data, v1.SubGroup)
+ await v1.subs_receipt_gen(nbesf_data)
+ case 2:
+ assert isinstance(nbesf_data, v2.SubGroup)
+ await v2.subs_receipt_gen(nbesf_data)
+ case _:
+ raise NBESFVerMatchErr(f"不支持的NBESF版本:{nbesf_data.version}")
+ logger.info("订阅流程结束,请检查所有订阅记录是否全部添加成功")
diff --git a/src/plugins/nonebot_bison/config/subs_io/utils.py b/src/plugins/nonebot_bison/config/subs_io/utils.py
new file mode 100644
index 00000000..181769a2
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/subs_io/utils.py
@@ -0,0 +1,4 @@
+class NBESFVerMatchErr(Exception): ...
+
+
+class NBESFParseErr(Exception): ...
diff --git a/src/plugins/nonebot_bison/config/utils.py b/src/plugins/nonebot_bison/config/utils.py
new file mode 100644
index 00000000..8c064974
--- /dev/null
+++ b/src/plugins/nonebot_bison/config/utils.py
@@ -0,0 +1,10 @@
+class NoSuchUserException(Exception):
+ pass
+
+
+class NoSuchSubscribeException(Exception):
+ pass
+
+
+class NoSuchTargetException(Exception):
+ pass
diff --git a/src/plugins/nonebot_bison/platform/__init__.py b/src/plugins/nonebot_bison/platform/__init__.py
new file mode 100644
index 00000000..c99ce122
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/__init__.py
@@ -0,0 +1,24 @@
+from pathlib import Path
+from pkgutil import iter_modules
+from collections import defaultdict
+from importlib import import_module
+
+from .platform import Platform, make_no_target_group
+
+_package_dir = str(Path(__file__).resolve().parent)
+for _, module_name, _ in iter_modules([_package_dir]):
+ import_module(f"{__name__}.{module_name}")
+
+
+_platform_list: defaultdict[str, list[type[Platform]]] = defaultdict(list)
+for _platform in Platform.registry:
+ if not _platform.enabled:
+ continue
+ _platform_list[_platform.platform_name].append(_platform)
+
+platform_manager: dict[str, type[Platform]] = {}
+for name, platform_list in _platform_list.items():
+ if len(platform_list) == 1:
+ platform_manager[name] = platform_list[0]
+ else:
+ platform_manager[name] = make_no_target_group(platform_list)
diff --git a/src/plugins/nonebot_bison/platform/arknights.py b/src/plugins/nonebot_bison/platform/arknights.py
new file mode 100644
index 00000000..e931d69e
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/arknights.py
@@ -0,0 +1,251 @@
+from typing import Any
+from functools import partial
+
+from yarl import URL
+from httpx import AsyncClient
+from bs4 import BeautifulSoup as bs
+from pydantic import Field, BaseModel
+from nonebot.compat import type_validate_python
+
+from ..post import Post
+from ..types import Target, RawPost, Category
+from .platform import NewMessage, StatusChange
+from ..utils.scheduler_config import SchedulerConfig
+
+
+class ArkResponseBase(BaseModel):
+ code: int
+ msg: str
+
+
+class BulletinListItem(BaseModel):
+ cid: str
+ title: str
+ category: int
+ display_time: str = Field(alias="displayTime")
+ updated_at: int = Field(alias="updatedAt")
+ sticky: bool
+
+
+class BulletinList(BaseModel):
+ list: list[BulletinListItem]
+
+
+class BulletinData(BaseModel):
+ cid: str
+ display_type: int = Field(alias="displayType")
+ title: str
+ category: int
+ header: str
+ content: str
+ jump_link: str = Field(alias="jumpLink")
+ banner_image_url: str = Field(alias="bannerImageUrl")
+ display_time: str = Field(alias="displayTime")
+ updated_at: int = Field(alias="updatedAt")
+
+
+class ArkBulletinListResponse(ArkResponseBase):
+ data: BulletinList
+
+
+class ArkBulletinResponse(ArkResponseBase):
+ data: BulletinData
+
+
+class ArknightsSchedConf(SchedulerConfig):
+ name = "arknights"
+ schedule_type = "interval"
+ schedule_setting = {"seconds": 30}
+
+
+class Arknights(NewMessage):
+ categories = {1: "游戏公告"}
+ platform_name = "arknights"
+ name = "明日方舟游戏信息"
+ enable_tag = False
+ enabled = True
+ is_common = False
+ scheduler = ArknightsSchedConf
+ has_target = False
+ default_theme = "arknights"
+
+ @classmethod
+ async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
+ return "明日方舟游戏信息"
+
+ async def get_sub_list(self, _) -> list[BulletinListItem]:
+ raw_data = await self.client.get("https://ak-webview.hypergryph.com/api/game/bulletinList?target=IOS")
+ return type_validate_python(ArkBulletinListResponse, raw_data.json()).data.list
+
+ def get_id(self, post: BulletinListItem) -> Any:
+ return post.cid
+
+ def get_date(self, post: BulletinListItem) -> Any:
+ # 为什么不使用post.updated_at?
+ # update_at的时间是上传鹰角服务器的时间,而不是公告发布的时间
+ # 也就是说鹰角可能会在中午就把晚上的公告上传到服务器,但晚上公告才会显示,但是update_at就是中午的时间不会改变
+ # 如果指定了get_date,那么get_date会被优先使用, 并在获取到的值超过2小时时忽略这条post,导致其不会被发送
+ return None
+
+ def get_category(self, _) -> Category:
+ return Category(1)
+
+ async def parse(self, raw_post: BulletinListItem) -> Post:
+ raw_data = await self.client.get(
+ f"https://ak-webview.hypergryph.com/api/game/bulletin/{self.get_id(post=raw_post)}"
+ )
+ data = type_validate_python(ArkBulletinResponse, raw_data.json()).data
+
+ def title_escape(text: str) -> str:
+ return text.replace("\\n", " - ")
+
+ # gen title, content
+ if data.header:
+ # header是title的更详细版本
+ # header会和content一起出现
+ title = data.header
+ else:
+ # 只有一张图片
+ title = title_escape(data.title)
+
+ return Post(
+ self,
+ content=data.content,
+ title=title,
+ nickname="明日方舟游戏内公告",
+ images=[data.banner_image_url] if data.banner_image_url else None,
+ url=(url.human_repr() if (url := URL(data.jump_link)).scheme.startswith("http") else None),
+ timestamp=data.updated_at,
+ compress=True,
+ )
+
+
+class AkVersion(StatusChange):
+ categories = {2: "更新信息"}
+ platform_name = "arknights"
+ name = "明日方舟游戏信息"
+ enable_tag = False
+ enabled = True
+ is_common = False
+ scheduler = ArknightsSchedConf
+ has_target = False
+ default_theme = "brief"
+
+ @classmethod
+ async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
+ return "明日方舟游戏信息"
+
+ async def get_status(self, _):
+ res_ver = await self.client.get("https://ak-conf.hypergryph.com/config/prod/official/IOS/version")
+ res_preanounce = await self.client.get(
+ "https://ak-conf.hypergryph.com/config/prod/announce_meta/IOS/preannouncement.meta.json"
+ )
+ res = res_ver.json()
+ res.update(res_preanounce.json())
+ return res
+
+ def compare_status(self, _, old_status, new_status):
+ res = []
+ ArkUpdatePost = partial(Post, self, "", nickname="明日方舟更新信息")
+ if old_status.get("preAnnounceType") == 2 and new_status.get("preAnnounceType") == 0:
+ res.append(ArkUpdatePost(title="登录界面维护公告上线(大概是开始维护了)"))
+ elif old_status.get("preAnnounceType") == 0 and new_status.get("preAnnounceType") == 2:
+ res.append(ArkUpdatePost(title="登录界面维护公告下线(大概是开服了,冲!)"))
+ if old_status.get("clientVersion") != new_status.get("clientVersion"):
+ res.append(ArkUpdatePost(title="游戏本体更新(大更新)"))
+ if old_status.get("resVersion") != new_status.get("resVersion"):
+ res.append(ArkUpdatePost(title="游戏资源更新(小更新)"))
+ return res
+
+ def get_category(self, _):
+ return Category(2)
+
+ async def parse(self, raw_post):
+ return raw_post
+
+
+class MonsterSiren(NewMessage):
+ categories = {3: "塞壬唱片新闻"}
+ platform_name = "arknights"
+ name = "明日方舟游戏信息"
+ enable_tag = False
+ enabled = True
+ is_common = False
+ scheduler = ArknightsSchedConf
+ has_target = False
+
+ @classmethod
+ async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
+ return "明日方舟游戏信息"
+
+ async def get_sub_list(self, _) -> list[RawPost]:
+ raw_data = await self.client.get("https://monster-siren.hypergryph.com/api/news")
+ return raw_data.json()["data"]["list"]
+
+ def get_id(self, post: RawPost) -> Any:
+ return post["cid"]
+
+ def get_date(self, _) -> None:
+ return None
+
+ def get_category(self, _) -> Category:
+ return Category(3)
+
+ async def parse(self, raw_post: RawPost) -> Post:
+ url = f'https://monster-siren.hypergryph.com/info/{raw_post["cid"]}'
+ res = await self.client.get(f'https://monster-siren.hypergryph.com/api/news/{raw_post["cid"]}')
+ raw_data = res.json()
+ content = raw_data["data"]["content"]
+ content = content.replace("
", "\n")
+ soup = bs(content, "html.parser")
+ imgs = [x["src"] for x in soup("img")]
+ text = f'{raw_post["title"]}\n{soup.text.strip()}'
+ return Post(
+ self,
+ text,
+ images=imgs,
+ url=url,
+ nickname="塞壬唱片新闻",
+ compress=True,
+ )
+
+
+class TerraHistoricusComic(NewMessage):
+ categories = {4: "泰拉记事社漫画"}
+ platform_name = "arknights"
+ name = "明日方舟游戏信息"
+ enable_tag = False
+ enabled = True
+ is_common = False
+ scheduler = ArknightsSchedConf
+ has_target = False
+ default_theme = "brief"
+
+ @classmethod
+ async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
+ return "明日方舟游戏信息"
+
+ async def get_sub_list(self, _) -> list[RawPost]:
+ raw_data = await self.client.get("https://terra-historicus.hypergryph.com/api/recentUpdate")
+ return raw_data.json()["data"]
+
+ def get_id(self, post: RawPost) -> Any:
+ return f'{post["comicCid"]}/{post["episodeCid"]}'
+
+ def get_date(self, _) -> None:
+ return None
+
+ def get_category(self, _) -> Category:
+ return Category(4)
+
+ async def parse(self, raw_post: RawPost) -> Post:
+ url = f'https://terra-historicus.hypergryph.com/comic/{raw_post["comicCid"]}/episode/{raw_post["episodeCid"]}'
+ return Post(
+ self,
+ raw_post["subtitle"],
+ title=f'{raw_post["title"]} - {raw_post["episodeShortTitle"]}',
+ images=[raw_post["coverUrl"]],
+ url=url,
+ nickname="泰拉记事社漫画",
+ compress=True,
+ )
diff --git a/src/plugins/nonebot_bison/platform/bilibili.py b/src/plugins/nonebot_bison/platform/bilibili.py
new file mode 100644
index 00000000..e8936593
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/bilibili.py
@@ -0,0 +1,567 @@
+import re
+import json
+from abc import ABC
+from copy import deepcopy
+from enum import Enum, unique
+from typing_extensions import Self
+from datetime import datetime, timedelta
+from typing import Any, TypeVar, TypeAlias, NamedTuple
+
+from httpx import AsyncClient
+from nonebot.log import logger
+from pydantic import Field, BaseModel
+from nonebot.compat import PYDANTIC_V2, ConfigDict, type_validate_json, type_validate_python
+
+from nonebot_bison.compat import model_rebuild
+
+from ..post import Post
+from ..types import Tag, Target, RawPost, ApiError, Category
+from ..utils import SchedulerConfig, http_client, text_similarity
+from .platform import NewMessage, StatusChange, CategoryNotSupport, CategoryNotRecognize
+
TBaseModel = TypeVar("TBaseModel", bound=type[BaseModel])


# Not usable as a decorator:
# when used as a decorator, the decorated class does not yet exist in the
# global namespace, and the rebuild would fail resolving forward references.
def model_rebuild_recurse(cls: TBaseModel) -> TBaseModel:
    """Recursively rebuild all BaseModel subclasses in the class."""
    if not PYDANTIC_V2:
        from inspect import isclass, getmembers

        # Pydantic v1 requires nested models to be rebuilt explicitly before
        # the outer model, so walk the inner BaseModel subclasses first.
        for _, sub_cls in getmembers(cls, lambda x: isclass(x) and issubclass(x, BaseModel)):
            model_rebuild_recurse(sub_cls)
    model_rebuild(cls)
    return cls
+
+
class Base(BaseModel):
    # Shared base model: enable attribute-based validation on both pydantic
    # v2 (`from_attributes`) and v1 (`orm_mode`).
    if PYDANTIC_V2:
        model_config = ConfigDict(from_attributes=True)
    else:

        class Config:
            orm_mode = True
+
+
class APIBase(Base):
    """Common envelope of Bilibili API responses (status code + message)."""

    code: int
    message: str
+
+
class UserAPI(APIBase):
    # Response model of /x/web-interface/card; only the user's name is used.
    class Card(Base):
        name: str

    class Data(Base):
        card: "UserAPI.Card"

    data: Data | None = None
+
+
class PostAPI(APIBase):
    """Response model of the (legacy) space_history dynamics API."""

    class Info(Base):
        uname: str

    class UserProfile(Base):
        info: "PostAPI.Info"

    class Origin(Base):
        # Metadata of the original dynamic when the fetched one is a repost.
        uid: int
        dynamic_id: int
        dynamic_id_str: str
        timestamp: int
        type: int
        rid: int
        bvid: str | None = None

    class Desc(Base):
        dynamic_id: int
        dynamic_id_str: str
        timestamp: int
        type: int
        user_profile: "PostAPI.UserProfile"
        rid: int
        bvid: str | None = None

        # Present only when this dynamic is a repost of another one.
        origin: "PostAPI.Origin | None" = None

    class Card(Base):
        desc: "PostAPI.Desc"
        # `card` is a JSON string whose schema depends on desc.type.
        card: str

    class Data(Base):
        cards: "list[PostAPI.Card] | None"

    data: Data | None = None
+
+
# A single dynamic entry as returned by the API.
DynRawPost: TypeAlias = PostAPI.Card

# Resolve the forward references used inside the nested models above.
model_rebuild_recurse(UserAPI)
model_rebuild_recurse(PostAPI)
+
+
class BilibiliClient:
    """Shared httpx client that keeps Bilibili's anonymous cookies fresh."""

    _client: AsyncClient
    _refresh_time: datetime
    cookie_expire_time = timedelta(hours=5)

    def __init__(self) -> None:
        self._client = http_client()
        # Start from a date far in the past so the first use refreshes cookies.
        self._refresh_time = datetime(year=2000, month=1, day=1)  # an expired time

    async def _init_session(self):
        resp = await self._client.get("https://www.bilibili.com/")
        if resp.status_code == 200:
            self._refresh_time = datetime.now()
        else:
            logger.warning("unable to refresh temp cookie")

    async def _refresh_client(self):
        cookie_age = datetime.now() - self._refresh_time
        if cookie_age > self.cookie_expire_time:
            await self._init_session()

    async def get_client(self) -> AsyncClient:
        # Refresh-on-demand: callers always get a client with live cookies.
        await self._refresh_client()
        return self._client


bilibili_client = BilibiliClient()
+
+
class BaseSchedConf(ABC, SchedulerConfig):
    # Base scheduler config: all Bilibili platforms share the single
    # cookie-refreshing client rather than opening their own sessions.
    schedule_type = "interval"
    bilibili_client: BilibiliClient

    def __init__(self):
        super().__init__()
        self.bilibili_client = bilibili_client

    async def get_client(self, _: Target) -> AsyncClient:
        # The target is ignored: the same anonymous session serves every target.
        return await self.bilibili_client.get_client()

    async def get_query_name_client(self) -> AsyncClient:
        return await self.bilibili_client.get_client()
+
+
class BilibiliSchedConf(BaseSchedConf):
    # General Bilibili APIs are polled every 10 seconds.
    name = "bilibili.com"
    schedule_setting = {"seconds": 10}
+
+
class BililiveSchedConf(BaseSchedConf):
    # Live status uses a faster 3-second polling interval.
    name = "live.bilibili.com"
    schedule_setting = {"seconds": 3}
+
+
+class Bilibili(NewMessage):
+ categories = {
+ 1: "一般动态",
+ 2: "专栏文章",
+ 3: "视频",
+ 4: "纯文字",
+ 5: "转发",
+ # 5: "短视频"
+ }
+ platform_name = "bilibili"
+ enable_tag = True
+ enabled = True
+ is_common = True
+ scheduler = BilibiliSchedConf
+ name = "B站"
+ has_target = True
+ parse_target_promot = "请输入用户主页的链接"
+
+ @classmethod
+ async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
+ res = await client.get("https://api.bilibili.com/x/web-interface/card", params={"mid": target})
+ res.raise_for_status()
+ res_data = type_validate_json(UserAPI, res.content)
+ if res_data.code != 0:
+ return None
+ return res_data.data.card.name if res_data.data else None
+
+ @classmethod
+ async def parse_target(cls, target_text: str) -> Target:
+ if re.match(r"\d+", target_text):
+ return Target(target_text)
+ elif m := re.match(r"(?:https?://)?space\.bilibili\.com/(\d+)", target_text):
+ return Target(m.group(1))
+ else:
+ raise cls.ParseTargetException()
+
+ async def get_sub_list(self, target: Target) -> list[DynRawPost]:
+ params = {"host_uid": target, "offset": 0, "need_top": 0}
+ res = await self.client.get(
+ "https://api.vc.bilibili.com/dynamic_svr/v1/dynamic_svr/space_history",
+ params=params,
+ timeout=4.0,
+ )
+ res.raise_for_status()
+ res_obj = type_validate_json(PostAPI, res.content)
+
+ if res_obj.code == 0:
+ if (data := res_obj.data) and (card := data.cards):
+ return card
+ return []
+ raise ApiError(res.request.url)
+
+ def get_id(self, post: DynRawPost) -> int:
+ return post.desc.dynamic_id
+
+ def get_date(self, post: DynRawPost) -> int:
+ return post.desc.timestamp
+
+ def _do_get_category(self, post_type: int) -> Category:
+ match post_type:
+ case 2:
+ return Category(1)
+ case 64:
+ return Category(2)
+ case 8:
+ return Category(3)
+ case 4:
+ return Category(4)
+ case 1:
+ # 转发
+ return Category(5)
+ case unknown_type:
+ raise CategoryNotRecognize(unknown_type)
+
+ def get_category(self, post: DynRawPost) -> Category:
+ post_type = post.desc.type
+ return self._do_get_category(post_type)
+
+ def get_tags(self, raw_post: DynRawPost) -> list[Tag]:
+ card_content = json.loads(raw_post.card)
+ text: str = card_content["item"]["content"]
+ result: list[str] = re.findall(r"#(.*?)#", text)
+ return result
+
+ def _text_process(self, dynamic: str, desc: str, title: str) -> str:
+ similarity = 1.0 if len(dynamic) == 0 or len(desc) == 0 else text_similarity(dynamic, desc)
+ if len(dynamic) == 0 and len(desc) == 0:
+ text = title
+ elif similarity > 0.8:
+ text = title + "\n\n" + desc if len(dynamic) < len(desc) else dynamic + "\n=================\n" + title
+ else:
+ text = dynamic + "\n=================\n" + title + "\n\n" + desc
+ return text
+
+ def _raw_post_parse(self, raw_post: DynRawPost, in_repost: bool = False):
+ class ParsedPost(NamedTuple):
+ text: str
+ pics: list[str]
+ url: str | None
+ repost_owner: str | None = None
+ repost: "ParsedPost | None" = None
+
+ card_content: dict[str, Any] = json.loads(raw_post.card)
+ repost_owner: str | None = ou["info"]["uname"] if (ou := card_content.get("origin_user")) else None
+
+ def extract_url_id(url_template: str, name: str) -> str | None:
+ if in_repost:
+ if origin := raw_post.desc.origin:
+ return url_template.format(getattr(origin, name))
+ return None
+ return url_template.format(getattr(raw_post.desc, name))
+
+ match self._do_get_category(raw_post.desc.type):
+ case 1:
+ # 一般动态
+ url = extract_url_id("https://t.bilibili.com/{}", "dynamic_id_str")
+ text: str = card_content["item"]["description"]
+ pic: list[str] = [img["img_src"] for img in card_content["item"]["pictures"]]
+ return ParsedPost(text, pic, url, repost_owner)
+ case 2:
+ # 专栏文章
+ url = extract_url_id("https://www.bilibili.com/read/cv{}", "rid")
+ text = "{} {}".format(card_content["title"], card_content["summary"])
+ pic = card_content["image_urls"]
+ return ParsedPost(text, pic, url, repost_owner)
+ case 3:
+ # 视频
+ url = extract_url_id("https://www.bilibili.com/video/{}", "bvid")
+ dynamic = card_content.get("dynamic", "")
+ title = card_content["title"]
+ desc = card_content.get("desc", "")
+ text = self._text_process(dynamic, desc, title)
+ pic = [card_content["pic"]]
+ return ParsedPost(text, pic, url, repost_owner)
+ case 4:
+ # 纯文字
+ url = extract_url_id("https://t.bilibili.com/{}", "dynamic_id_str")
+ text = card_content["item"]["content"]
+ pic = []
+ return ParsedPost(text, pic, url, repost_owner)
+ case 5:
+ # 转发
+ url = extract_url_id("https://t.bilibili.com/{}", "dynamic_id_str")
+ text = card_content["item"]["content"]
+ orig_type: int = card_content["item"]["orig_type"]
+ orig_card: str = card_content["origin"]
+ orig_post = DynRawPost(desc=raw_post.desc, card=orig_card)
+ orig_post.desc.type = orig_type
+
+ orig_parsed_post = self._raw_post_parse(orig_post, in_repost=True)
+ return ParsedPost(text, [], url, repost_owner, orig_parsed_post)
+ case unsupported_type:
+ raise CategoryNotSupport(unsupported_type)
+
+ async def parse(self, raw_post: DynRawPost) -> Post:
+ parsed_raw_post = self._raw_post_parse(raw_post)
+
+ post = Post(
+ self,
+ parsed_raw_post.text,
+ url=parsed_raw_post.url,
+ images=list(parsed_raw_post.pics),
+ nickname=raw_post.desc.user_profile.info.uname,
+ )
+ if rp := parsed_raw_post.repost:
+ post.repost = Post(
+ self,
+ rp.text,
+ url=rp.url,
+ images=list(rp.pics),
+ nickname=rp.repost_owner,
+ )
+ return post
+
+
+class Bilibililive(StatusChange):
+ categories = {1: "开播提醒", 2: "标题更新提醒", 3: "下播提醒"}
+ platform_name = "bilibili-live"
+ enable_tag = False
+ enabled = True
+ is_common = True
+ scheduler = BililiveSchedConf
+ name = "Bilibili直播"
+ has_target = True
+ use_batch = True
+ default_theme = "brief"
+
+ @unique
+ class LiveStatus(Enum):
+ # 直播状态
+ # 0: 未开播
+ # 1: 正在直播
+ # 2: 轮播中
+ OFF = 0
+ ON = 1
+ CYCLE = 2
+
+ @unique
+ class LiveAction(Enum):
+ # 当前直播行为,由新旧直播状态对比决定
+ # on: 正在直播
+ # off: 未开播
+ # turn_on: 状态变更为正在直播
+ # turn_off: 状态变更为未开播
+ # title_update: 标题更新
+ TURN_ON = "turn_on"
+ TURN_OFF = "turn_off"
+ ON = "on"
+ OFF = "off"
+ TITLE_UPDATE = "title_update"
+
+ class Info(BaseModel):
+ title: str
+ room_id: int # 直播间号
+ uid: int # 主播uid
+ live_time: int # 开播时间
+ live_status: "Bilibililive.LiveStatus"
+ area_name: str = Field(alias="area_v2_name") # 新版分区名
+ uname: str # 主播名
+ face: str # 头像url
+ cover: str = Field(alias="cover_from_user") # 封面url
+ keyframe: str # 关键帧url,可能会有延迟
+ category: Category = Field(default=Category(0))
+
+ def get_live_action(self, old_info: Self) -> "Bilibililive.LiveAction":
+ status = Bilibililive.LiveStatus
+ action = Bilibililive.LiveAction
+ if old_info.live_status in [status.OFF, status.CYCLE] and self.live_status == status.ON:
+ return action.TURN_ON
+ elif old_info.live_status == status.ON and self.live_status in [
+ status.OFF,
+ status.CYCLE,
+ ]:
+ return action.TURN_OFF
+ elif old_info.live_status == status.ON and self.live_status == status.ON:
+ if old_info.title != self.title:
+ # 开播时通常会改标题,避免短时间推送两次
+ return action.TITLE_UPDATE
+ else:
+ return action.ON
+ else:
+ return action.OFF
+
+ @classmethod
+ async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
+ res = await client.get("https://api.bilibili.com/x/web-interface/card", params={"mid": target})
+ res_data = json.loads(res.text)
+ if res_data["code"]:
+ return None
+ return res_data["data"]["card"]["name"]
+
+ def _gen_empty_info(self, uid: int) -> Info:
+ """返回一个空的Info,用于该用户没有直播间的情况"""
+ return Bilibililive.Info(
+ title="",
+ room_id=0,
+ uid=uid,
+ live_time=0,
+ live_status=Bilibililive.LiveStatus.OFF,
+ area_v2_name="",
+ uname="",
+ face="",
+ cover_from_user="",
+ keyframe="",
+ )
+
+ async def batch_get_status(self, targets: list[Target]) -> list[Info]:
+ # https://github.com/SocialSisterYi/bilibili-API-collect/blob/master/docs/live/info.md#批量查询直播间状态
+ res = await self.client.get(
+ "https://api.live.bilibili.com/room/v1/Room/get_status_info_by_uids",
+ params={"uids[]": targets},
+ timeout=4.0,
+ )
+ res_dict = res.json()
+
+ if res_dict["code"] != 0:
+ raise self.FetchError()
+
+ data = res_dict.get("data", {})
+ infos = []
+ for target in targets:
+ if target in data.keys():
+ infos.append(type_validate_python(self.Info, data[target]))
+ else:
+ infos.append(self._gen_empty_info(int(target)))
+ return infos
+
+ def compare_status(self, _: Target, old_status: Info, new_status: Info) -> list[RawPost]:
+ action = Bilibililive.LiveAction
+ match new_status.get_live_action(old_status):
+ case action.TURN_ON:
+ return self._gen_current_status(new_status, 1)
+ case action.TITLE_UPDATE:
+ return self._gen_current_status(new_status, 2)
+ case action.TURN_OFF:
+ return self._gen_current_status(new_status, 3)
+ case _:
+ return []
+
+ def _gen_current_status(self, new_status: Info, category: Category):
+ current_status = deepcopy(new_status)
+ current_status.category = Category(category)
+ return [current_status]
+
+ def get_category(self, status: Info) -> Category:
+ assert status.category != Category(0)
+ return status.category
+
+ async def parse(self, raw_post: Info) -> Post:
+ url = f"https://live.bilibili.com/{raw_post.room_id}"
+ pic = [raw_post.cover] if raw_post.category == Category(1) else [raw_post.keyframe]
+ title = f"[{self.categories[raw_post.category].rstrip('提醒')}] {raw_post.title}"
+ target_name = f"{raw_post.uname} {raw_post.area_name}"
+ return Post(
+ self,
+ "",
+ title=title,
+ url=url,
+ images=list(pic),
+ nickname=target_name,
+ compress=True,
+ )
+
+
class BilibiliBangumi(StatusChange):
    """Bilibili bangumi (剧集) new-episode watcher."""

    categories = {}
    platform_name = "bilibili-bangumi"
    enable_tag = False
    enabled = True
    is_common = True
    scheduler = BilibiliSchedConf
    name = "Bilibili剧集"
    has_target = True
    parse_target_promot = "请输入剧集主页"
    default_theme = "brief"

    _url = "https://api.bilibili.com/pgc/review/user"

    @classmethod
    async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
        """Resolve a media id to the series title, or None on API error."""
        res = await client.get(cls._url, params={"media_id": target})
        res_data = res.json()
        if res_data["code"]:
            return None
        return res_data["result"]["media"]["title"]

    @classmethod
    async def parse_target(cls, target_string: str) -> Target:
        """Accept a bare media id, an "md<id>" token, or a media page URL.

        Raises:
            cls.ParseTargetException: when no form matches.
        """
        # Anchored pattern: the unanchored r"\d+" accepted any digit-prefixed
        # input and stored the whole string verbatim as the media id.
        if re.match(r"^\d+$", target_string):
            return Target(target_string)
        elif m := re.match(r"md(\d+)", target_string):
            return Target(m[1])
        elif m := re.match(r"(?:https?://)?www\.bilibili\.com/bangumi/media/md(\d+)", target_string):
            return Target(m[1])
        raise cls.ParseTargetException()

    async def get_status(self, target: Target):
        """Return the newest-episode marker; raises FetchError on API failure."""
        res = await self.client.get(
            self._url,
            params={"media_id": target},
            timeout=4.0,
        )
        res_dict = res.json()
        if res_dict["code"] == 0:
            return {
                "index": res_dict["result"]["media"]["new_ep"]["index"],
                "index_show": res_dict["result"]["media"]["new_ep"]["index_show"],
                "season_id": res_dict["result"]["media"]["season_id"],
            }
        else:
            raise self.FetchError

    def compare_status(self, target: Target, old_status, new_status) -> list[RawPost]:
        # A changed newest-episode index means a new episode was released.
        if new_status["index"] != old_status["index"]:
            return [new_status]
        else:
            return []

    async def parse(self, raw_post: RawPost) -> Post:
        """Fetch season detail and build a Post for the newest watchable episode."""
        detail_res = await self.client.get(
            f'https://api.bilibili.com/pgc/view/web/season?season_id={raw_post["season_id"]}'
        )
        detail_dict = detail_res.json()
        latest_episode = None
        # Scan from the end for the newest normal or member episode
        # (skipping previews and other badged entries).
        for episode in detail_dict["result"]["episodes"][::-1]:
            if episode["badge"] in ("", "会员"):
                latest_episode = episode
                break
        if not latest_episode:
            # Fall back to the last episode. (The previous code assigned the
            # whole episode *list* here, which crashed on the subscripts below.)
            latest_episode = detail_dict["result"]["episodes"][-1]

        url = latest_episode["link"]
        pic: list[str] = [latest_episode["cover"]]
        target_name = detail_dict["result"]["season_title"]
        content = raw_post["index_show"]
        title = latest_episode["share_copy"]
        return Post(
            self,
            content,
            title=title,
            url=url,
            images=list(pic),
            nickname=target_name,
            compress=True,
        )
+
+
+model_rebuild(Bilibililive.Info)
diff --git a/src/plugins/nonebot_bison/platform/ff14.py b/src/plugins/nonebot_bison/platform/ff14.py
new file mode 100644
index 00000000..e050aaef
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/ff14.py
@@ -0,0 +1,46 @@
+from typing import Any
+
+from httpx import AsyncClient
+
+from ..post import Post
+from ..utils import scheduler
+from .platform import NewMessage
+from ..types import Target, RawPost
+
+
class FF14(NewMessage):
    """Final Fantasy XIV (CN) official announcement watcher."""

    categories = {}
    platform_name = "ff14"
    name = "最终幻想XIV官方公告"
    enable_tag = False
    enabled = True
    is_common = False
    scheduler_class = "ff14"
    scheduler = scheduler("interval", {"seconds": 60})
    has_target = False

    @classmethod
    async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
        # Target-less platform: a fixed display name is sufficient.
        return "最终幻想XIV官方公告"

    async def get_sub_list(self, _) -> list[RawPost]:
        resp = await self.client.get(
            "https://cqnews.web.sdo.com/api/news/newsList?gameCode=ff&CategoryCode=5309,5310,5311,5312,5313&pageIndex=0&pageSize=5"
        )
        return resp.json()["Data"]

    def get_id(self, post: RawPost) -> Any:
        """Use the publish time as the ID.

        The official site sometimes edits old articles in place, so the
        article id alone is not a reliable change marker.
        """
        return post["PublishDate"]

    def get_date(self, _: RawPost) -> None:
        return None

    async def parse(self, raw_post: RawPost) -> Post:
        # NOTE(review): this API appears to carry the article link in the
        # "Author" field — confirm against the upstream response schema.
        return Post(
            self,
            raw_post["Summary"],
            title=raw_post["Title"],
            url=raw_post["Author"],
            nickname="最终幻想XIV官方公告",
        )
diff --git a/src/plugins/nonebot_bison/platform/ncm.py b/src/plugins/nonebot_bison/platform/ncm.py
new file mode 100644
index 00000000..031dc93b
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/ncm.py
@@ -0,0 +1,129 @@
+import re
+from typing import Any
+
+from httpx import AsyncClient
+
+from ..post import Post
+from .platform import NewMessage
+from ..utils import SchedulerConfig
+from ..types import Target, RawPost, ApiError
+
+
class NcmSchedConf(SchedulerConfig):
    # Shared scheduler for the NetEase Cloud Music platforms: poll once a minute.
    name = "music.163.com"
    schedule_type = "interval"
    schedule_setting = {"minutes": 1}
+
+
class NcmArtist(NewMessage):
    """NetEase Cloud Music artist new-album watcher."""

    categories = {}
    platform_name = "ncm-artist"
    enable_tag = False
    enabled = True
    is_common = True
    scheduler = NcmSchedConf
    name = "网易云-歌手"
    has_target = True
    parse_target_promot = "请输入歌手主页(包含数字ID)的链接"

    @classmethod
    async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
        res = await client.get(
            f"https://music.163.com/api/artist/albums/{target}",
            headers={"Referer": "https://music.163.com/"},
        )
        payload = res.json()
        if payload["code"] != 200:
            raise ApiError(res.request.url)
        return payload["artist"]["name"]

    @classmethod
    async def parse_target(cls, target_text: str) -> Target:
        """Accept a bare numeric artist id or an artist page URL."""
        if re.match(r"^\d+$", target_text):
            return Target(target_text)
        if match := re.match(r"(?:https?://)?music\.163\.com/#/artist\?id=(\d+)", target_text):
            return Target(match.group(1))
        raise cls.ParseTargetException()

    async def get_sub_list(self, target: Target) -> list[RawPost]:
        res = await self.client.get(
            f"https://music.163.com/api/artist/albums/{target}",
            headers={"Referer": "https://music.163.com/"},
        )
        payload = res.json()
        return payload["hotAlbums"] if payload["code"] == 200 else []

    def get_id(self, post: RawPost) -> Any:
        return post["id"]

    def get_date(self, post: RawPost) -> int:
        # publishTime is in milliseconds; convert to seconds.
        return post["publishTime"] // 1000

    async def parse(self, raw_post: RawPost) -> Post:
        return Post(
            self,
            "新专辑发布:{}".format(raw_post["name"]),
            url="https://music.163.com/#/album?id={}".format(raw_post["id"]),
            images=[raw_post["picUrl"]],
            nickname=raw_post["artist"]["name"],
        )
+
+
class NcmRadio(NewMessage):
    """NetEase Cloud Music radio (电台) program watcher."""

    categories = {}
    platform_name = "ncm-radio"
    enable_tag = False
    enabled = True
    is_common = False
    scheduler = NcmSchedConf
    name = "网易云-电台"
    has_target = True
    parse_target_promot = "请输入主播电台主页(包含数字ID)的链接"

    @classmethod
    async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
        """Resolve a radio id to its name; None if the API errs or it has no programs."""
        res = await client.post(
            "http://music.163.com/api/dj/program/byradio",
            headers={"Referer": "https://music.163.com/"},
            data={"radioId": target, "limit": 1000, "offset": 0},
        )
        res_data = res.json()
        # "programs" is a list, so the old check `programs == 0` was always
        # False and an empty radio crashed on the [0] index below.
        if res_data["code"] != 200 or not res_data["programs"]:
            return
        return res_data["programs"][0]["radio"]["name"]

    @classmethod
    async def parse_target(cls, target_text: str) -> Target:
        """Accept a bare numeric radio id or a djradio page URL.

        Raises:
            cls.ParseTargetException: when neither form matches.
        """
        if re.match(r"^\d+$", target_text):
            return Target(target_text)
        elif match := re.match(r"(?:https?://)?music\.163\.com/#/djradio\?id=(\d+)", target_text):
            return Target(match.group(1))
        else:
            raise cls.ParseTargetException()

    async def get_sub_list(self, target: Target) -> list[RawPost]:
        """Return the radio's program list, or [] on API error."""
        res = await self.client.post(
            "http://music.163.com/api/dj/program/byradio",
            headers={"Referer": "https://music.163.com/"},
            data={"radioId": target, "limit": 1000, "offset": 0},
        )
        res_data = res.json()
        if res_data["code"] != 200:
            return []
        else:
            return res_data["programs"]

    def get_id(self, post: RawPost) -> Any:
        return post["id"]

    def get_date(self, post: RawPost) -> int:
        # createTime is in milliseconds; convert to seconds.
        return post["createTime"] // 1000

    async def parse(self, raw_post: RawPost) -> Post:
        text = "网易云电台更新:{}".format(raw_post["name"])
        target_name = raw_post["radio"]["name"]
        pics = [raw_post["coverUrl"]]
        url = "https://music.163.com/#/program/{}".format(raw_post["id"])
        return Post(self, text, url=url, images=pics, nickname=target_name)
diff --git a/src/plugins/nonebot_bison/platform/platform.py b/src/plugins/nonebot_bison/platform/platform.py
new file mode 100644
index 00000000..0c902c65
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/platform.py
@@ -0,0 +1,501 @@
+import ssl
+import json
+import time
+import typing
+from dataclasses import dataclass
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from typing import Any, TypeVar, ParamSpec
+from collections.abc import Callable, Awaitable, Collection
+
+import httpx
+from httpx import AsyncClient
+from nonebot.log import logger
+from nonebot_plugin_saa import PlatformTarget
+
+from ..post import Post
+from ..plugin_config import plugin_config
+from ..utils import ProcessContext, SchedulerConfig
+from ..types import Tag, Target, RawPost, SubUnit, Category
+
+
class CategoryNotSupport(Exception):
    """Raised in get_category when the post's category is recognized but
    deliberately unsupported, or its parsing is not implemented yet.
    """
+
+
class CategoryNotRecognize(Exception):
    """Raised in get_category when the post's category cannot be determined."""
+
+
class RegistryMeta(type):
    """Metaclass that auto-registers concrete subclasses on the base class.

    Classes declared with ``base=True`` get a fresh ``registry`` list; classes
    declared with ``abstract=True`` are skipped; everything else is appended
    to the nearest base's registry.
    """

    def __new__(cls, name, bases, namespace, **kwargs):
        # Drop the custom keyword arguments (base/abstract) before type.__new__.
        return super().__new__(cls, name, bases, namespace)

    def __init__(cls, name, bases, namespace, **kwargs):
        if kwargs.get("base"):
            # this is the base class
            cls.registry = []
        elif not kwargs.get("abstract"):
            # this is the subclass
            cls.registry.append(cls)

        super().__init__(name, bases, namespace, **kwargs)
+
+
P = ParamSpec("P")
R = TypeVar("R")


async def catch_network_error(func: Callable[P, Awaitable[R]], *args: P.args, **kwargs: P.kwargs) -> R | None:
    """Run *func*, converting network-level failures into a None result.

    httpx request errors and SSL errors are (optionally) logged and swallowed;
    JSON decode errors are logged with the offending document and re-raised.
    """
    try:
        return await func(*args, **kwargs)
    except httpx.RequestError as err:
        if plugin_config.bison_show_network_warning:
            logger.warning(f"network connection error: {type(err)}, url: {err.request.url}")
    except ssl.SSLError as err:
        if plugin_config.bison_show_network_warning:
            logger.warning(f"ssl error: {err}")
    except json.JSONDecodeError as err:
        logger.warning(f"json error, parsing: {err.doc}")
        raise err
    return None
+
+
class PlatformMeta(RegistryMeta):
    """Platform metaclass: builds the value->key reverse lookup of
    ``categories`` and gives each platform class its own state store."""

    categories: dict[Category, str]
    store: dict[Target, Any]

    def __init__(cls, name, bases, namespace, **kwargs):
        cls.reverse_category = {}
        cls.store = {}
        if hasattr(cls, "categories") and cls.categories:
            for key, val in cls.categories.items():
                cls.reverse_category[val] = key
        super().__init__(name, bases, namespace, **kwargs)
+
+
+class PlatformABCMeta(PlatformMeta, ABC): ...
+
+
class Platform(metaclass=PlatformABCMeta, base=True):
    """Abstract base of every subscription platform.

    Concrete platforms describe themselves via the class attributes below and
    implement the fetch/parse hooks; shared category/tag filtering and
    per-subscriber dispatch live here.
    """

    scheduler: type[SchedulerConfig]
    ctx: ProcessContext
    is_common: bool
    enabled: bool
    name: str
    has_target: bool
    categories: dict[Category, str]
    enable_tag: bool
    platform_name: str
    parse_target_promot: str | None = None
    registry: list[type["Platform"]]
    client: AsyncClient
    reverse_category: dict[str, Category]
    use_batch: bool = False
    # TODO: 限定可使用的theme名称
    default_theme: str = "basic"

    def __init__(self, context: ProcessContext, client: AsyncClient):
        super().__init__()
        self.client = client
        self.ctx = context

    class ParseTargetException(Exception):
        """Raised when user input cannot be parsed into a target."""

    @classmethod
    @abstractmethod
    async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None: ...

    @classmethod
    async def parse_target(cls, target_string: str) -> Target:
        # Default behavior: the raw user input already is the target.
        return Target(target_string)

    @abstractmethod
    async def fetch_new_post(self, sub_unit: SubUnit) -> list[tuple[PlatformTarget, list[Post]]]: ...

    async def do_fetch_new_post(self, sub_unit: SubUnit) -> list[tuple[PlatformTarget, list[Post]]]:
        # Network failures degrade to "nothing new" instead of propagating.
        return await catch_network_error(self.fetch_new_post, sub_unit) or []

    @abstractmethod
    async def batch_fetch_new_post(self, sub_units: list[SubUnit]) -> list[tuple[PlatformTarget, list[Post]]]: ...

    async def do_batch_fetch_new_post(self, sub_units: list[SubUnit]) -> list[tuple[PlatformTarget, list[Post]]]:
        return await catch_network_error(self.batch_fetch_new_post, sub_units) or []

    @abstractmethod
    async def parse(self, raw_post: RawPost) -> Post: ...

    async def do_parse(self, raw_post: RawPost) -> Post:
        """Hook actually invoked by the dispatcher (subclasses may add caching)."""
        return await self.parse(raw_post)

    @abstractmethod
    def get_tags(self, raw_post: RawPost) -> Collection[Tag] | None:
        "Return Tag list of given RawPost"

    @abstractmethod
    def get_category(self, post: RawPost) -> Category | None:
        "Return category of given Rawpost"
        raise NotImplementedError()

    @classmethod
    def get_stored_data(cls, target: Target) -> Any:
        return cls.store.get(target)

    @classmethod
    def set_stored_data(cls, target: Target, data: Any):
        cls.store[target] = data

    def tag_separator(self, stored_tags: list[Tag]) -> tuple[list[Tag], list[Tag]]:
        """Split stored tags into (subscribed, banned); a "~" prefix marks a ban."""
        subscribed = [tag for tag in stored_tags if not tag.startswith("~")]
        banned = [tag.lstrip("~") for tag in stored_tags if tag.startswith("~")]
        return subscribed, banned

    def is_banned_post(
        self,
        post_tags: Collection[Tag],
        subscribed_tags: list[Tag],
        banned_tags: list[Tag],
    ) -> bool:
        """Decide whether a post must be hidden by tag rules.

        Any banned tag hides the post (highest priority); otherwise any
        subscribed tag shows it; with no subscribed tags nothing is hidden.
        """
        tags = post_tags or []
        if banned_tags and any(tag in banned_tags for tag in tags):
            return True
        if subscribed_tags:
            return not any(tag in subscribed_tags for tag in tags)
        return False

    async def filter_user_custom(
        self, raw_post_list: list[RawPost], cats: list[Category], tags: list[Tag]
    ) -> list[RawPost]:
        """Apply one subscriber's category and tag preferences to raw posts."""
        kept: list[RawPost] = []
        for raw_post in raw_post_list:
            if self.categories:
                cat = self.get_category(raw_post)
                if cats and cat not in cats:
                    continue
            if self.enable_tag and tags:
                raw_post_tags = self.get_tags(raw_post)
                if isinstance(raw_post_tags, Collection) and self.is_banned_post(
                    raw_post_tags, *self.tag_separator(tags)
                ):
                    continue
            kept.append(raw_post)
        return kept

    async def dispatch_user_post(
        self, new_posts: list[RawPost], sub_unit: SubUnit
    ) -> list[tuple[PlatformTarget, list[Post]]]:
        """Filter and parse the new posts once per subscriber."""
        results: list[tuple[PlatformTarget, list[Post]]] = []
        for user, cats, required_tags in sub_unit.user_sub_infos:
            filtered = await self.filter_user_custom(new_posts, cats, required_tags)
            parsed = [await self.do_parse(raw_post) for raw_post in filtered]
            results.append((user, parsed))
        return results
+
+
class MessageProcess(Platform, abstract=True):
    "General message process fetch, parse, filter progress"

    def __init__(self, ctx: ProcessContext, client: AsyncClient):
        super().__init__(ctx, client)
        # Per-cycle cache so each raw post is parsed at most once per fetch.
        self.parse_cache: dict[Any, Post] = {}

    @abstractmethod
    def get_id(self, post: RawPost) -> Any:
        "Get post id of given RawPost"

    @abstractmethod
    def get_date(self, post: RawPost) -> int | None:
        "Get post timestamp and return, return None if can't get the time"

    @abstractmethod
    async def get_sub_list(self, target: Target) -> list[RawPost]:
        "Get post list of the given target"
        raise NotImplementedError()

    @abstractmethod
    async def batch_get_sub_list(self, targets: list[Target]) -> list[list[RawPost]]:
        "Get post list of the given targets"
        raise NotImplementedError()

    async def do_parse(self, raw_post: RawPost) -> Post:
        """Parse with caching, retrying up to three times on failure."""
        post_id = self.get_id(raw_post)
        if post_id not in self.parse_cache:
            for attempt in range(1, 4):
                try:
                    self.parse_cache[post_id] = await self.parse(raw_post)
                    break
                except Exception as err:
                    # Re-raise only once all attempts are exhausted.
                    if attempt == 3:
                        raise err
        return self.parse_cache[post_id]

    async def filter_common(self, raw_post_list: list[RawPost]) -> list[RawPost]:
        """Drop stale posts and posts whose category cannot be handled."""
        kept = []
        for raw_post in raw_post_list:
            post_time = self.get_date(raw_post)
            # Skip posts older than two hours when init filtering is enabled.
            if post_time and time.time() - post_time > 2 * 60 * 60 and plugin_config.bison_init_filter:
                continue
            try:
                self.get_category(raw_post)
            except CategoryNotSupport as e:
                logger.info("未支持解析的推文类别:" + repr(e) + ",忽略")
                continue
            except CategoryNotRecognize as e:
                logger.warning("未知推文类别:" + repr(e))
                for record in self.ctx.gen_req_records():
                    logger.warning(record)
                continue
            except NotImplementedError:
                # Platform does not categorize posts at all; keep everything.
                pass
            kept.append(raw_post)
        return kept
+
+
class NewMessage(MessageProcess, abstract=True):
    "Fetch a list of messages, filter the new messages, dispatch it to different users"

    @dataclass
    class MessageStorage:
        # Whether this target has completed its first fetch.
        inited: bool
        # Post ids already seen for this target.
        exists_posts: set[Any]

    async def filter_common_with_diff(self, target: Target, raw_post_list: list[RawPost]) -> list[RawPost]:
        """Run the common filter, then keep only ids not seen before."""
        filtered_post = await self.filter_common(raw_post_list)
        store = self.get_stored_data(target) or self.MessageStorage(False, set())
        fresh: list[RawPost] = []
        if store.inited or not plugin_config.bison_init_filter:
            for raw_post in filtered_post:
                post_id = self.get_id(raw_post)
                if post_id in store.exists_posts:
                    continue
                fresh.append(raw_post)
                store.exists_posts.add(post_id)
        else:
            # First sight of this target: record everything, push nothing.
            for raw_post in filtered_post:
                store.exists_posts.add(self.get_id(raw_post))
            logger.info(f"init {self.platform_name}-{target} with {store.exists_posts}")
            store.inited = True
        self.set_stored_data(target, store)
        return fresh

    async def _handle_new_post(
        self,
        post_list: list[RawPost],
        sub_unit: SubUnit,
    ) -> list[tuple[PlatformTarget, list[Post]]]:
        """Diff, log, and dispatch a fetched post list for one target."""
        new_posts = await self.filter_common_with_diff(sub_unit.sub_target, post_list)
        if not new_posts:
            return []
        for post in new_posts:
            logger.info(
                "fetch new post from {} {}: {}".format(
                    self.platform_name,
                    sub_unit.sub_target if self.has_target else "-",
                    self.get_id(post),
                )
            )
        res = await self.dispatch_user_post(new_posts, sub_unit)
        # Dispatch is done; free the per-cycle parse cache.
        self.parse_cache = {}
        return res

    async def fetch_new_post(self, sub_unit: SubUnit) -> list[tuple[PlatformTarget, list[Post]]]:
        post_list = await self.get_sub_list(sub_unit.sub_target)
        return await self._handle_new_post(post_list, sub_unit)

    async def batch_fetch_new_post(self, sub_units: list[SubUnit]) -> list[tuple[PlatformTarget, list[Post]]]:
        if not self.has_target:
            raise RuntimeError("Target without target should not use batch api")  # pragma: no cover
        posts_set = await self.batch_get_sub_list([unit[0] for unit in sub_units])
        res = []
        for sub_unit, posts in zip(sub_units, posts_set):
            res.extend(await self._handle_new_post(posts, sub_unit))
        return res
+
+
class StatusChange(Platform, abstract=True):
    "Watch a status, and fire a post when status changes"

    class FetchError(RuntimeError):
        """Raised by get_status/batch_get_status when the endpoint errs."""

    @abstractmethod
    async def get_status(self, target: Target) -> Any: ...

    @abstractmethod
    async def batch_get_status(self, targets: list[Target]) -> list[Any]: ...

    @abstractmethod
    def compare_status(self, target: Target, old_status, new_status) -> list[RawPost]: ...

    @abstractmethod
    async def parse(self, raw_post: RawPost) -> Post: ...

    async def _handle_status_change(
        self, new_status: Any, sub_unit: SubUnit
    ) -> list[tuple[PlatformTarget, list[Post]]]:
        """Diff against the stored status, dispatch the delta, store the new one."""
        res: list[tuple[PlatformTarget, list[Post]]] = []
        old_status = self.get_stored_data(sub_unit.sub_target)
        # No stored status yet means first fetch: just record, push nothing.
        if old_status:
            diff = self.compare_status(sub_unit.sub_target, old_status, new_status)
            if diff:
                logger.info(
                    "status changes {} {}: {} -> {}".format(
                        self.platform_name,
                        sub_unit.sub_target if self.has_target else "-",
                        old_status,
                        new_status,
                    )
                )
                res = await self.dispatch_user_post(diff, sub_unit)
        self.set_stored_data(sub_unit.sub_target, new_status)
        return res

    async def fetch_new_post(self, sub_unit: SubUnit) -> list[tuple[PlatformTarget, list[Post]]]:
        try:
            new_status = await self.get_status(sub_unit.sub_target)
        except self.FetchError as err:
            logger.warning(f"fetching {self.name}-{sub_unit.sub_target} error: {err}")
            raise
        return await self._handle_status_change(new_status, sub_unit)

    async def batch_fetch_new_post(self, sub_units: list[SubUnit]) -> list[tuple[PlatformTarget, list[Post]]]:
        if not self.has_target:
            raise RuntimeError("Target without target should not use batch api")  # pragma: no cover
        statuses = await self.batch_get_status([unit[0] for unit in sub_units])
        res: list[tuple[PlatformTarget, list[Post]]] = []
        for sub_unit, status in zip(sub_units, statuses):
            res.extend(await self._handle_status_change(status, sub_unit))
        return res
+
+
class SimplePost(NewMessage, abstract=True):
    "Fetch a list of messages, dispatch it to different users"

    async def _handle_new_post(
        self,
        new_posts: list[RawPost],
        sub_unit: SubUnit,
    ) -> list[tuple[PlatformTarget, list[Post]]]:
        """Dispatch the given posts as-is — unlike NewMessage, no seen-filter is applied."""
        if not new_posts:
            return []
        else:
            for post in new_posts:
                logger.info(
                    "fetch new post from {} {}: {}".format(
                        self.platform_name,
                        sub_unit.sub_target if self.has_target else "-",
                        self.get_id(post),
                    )
                )
            res = await self.dispatch_user_post(new_posts, sub_unit)
            # Parse cache only lives for one polling round.
            self.parse_cache = {}
            return res
+
+
def make_no_target_group(platform_list: list[type[Platform]]) -> type[Platform]:
    """Merge several no-target platforms sharing one ``platform_name`` into a
    single dynamically built Platform subclass that fans fetches out to each.

    All members must agree on ``name`` and ``scheduler``, have disjoint
    category keys, and none may have a target; otherwise RuntimeError.
    """
    if typing.TYPE_CHECKING:
        # Static-only skeleton so type checkers know the attributes of the
        # class created dynamically below.
        class NoTargetGroup(Platform, abstract=True):
            platform_list: list[type[Platform]]
            platform_obj_list: list[Platform]

    DUMMY_STR = "_DUMMY"  # sentinel meaning "group name not decided yet"

    platform_name = platform_list[0].platform_name
    name = DUMMY_STR
    categories_keys = set()
    categories = {}
    scheduler = platform_list[0].scheduler

    # Validate the group: consistent name/scheduler, disjoint category keys.
    for platform in platform_list:
        if platform.has_target:
            raise RuntimeError(f"Platform {platform.name} should have no target")
        if name == DUMMY_STR:
            name = platform.name
        elif name != platform.name:
            raise RuntimeError(f"Platform name for {platform_name} not fit")
        platform_category_key_set = set(platform.categories.keys())
        if platform_category_key_set & categories_keys:
            raise RuntimeError(f"Platform categories for {platform_name} duplicate")
        categories_keys |= platform_category_key_set
        categories.update(platform.categories)
        if platform.scheduler != scheduler:
            raise RuntimeError(f"Platform scheduler for {platform_name} not fit")

    def __init__(self: "NoTargetGroup", ctx: ProcessContext, client: AsyncClient):
        # Instantiate every member platform with the shared context/client.
        Platform.__init__(self, ctx, client)
        self.platform_obj_list = []
        for platform_class in self.platform_list:
            self.platform_obj_list.append(platform_class(ctx, client))

    def __str__(self: "NoTargetGroup") -> str:
        return "[" + " ".join(x.name for x in self.platform_list) + "]"

    @classmethod
    async def get_target_name(cls, client: AsyncClient, target: Target):
        # Delegate to the first member platform.
        return await platform_list[0].get_target_name(client, target)

    async def fetch_new_post(self: "NoTargetGroup", sub_unit: SubUnit):
        # Fan out to every member platform and merge its posts per user.
        res = defaultdict(list)
        for platform in self.platform_obj_list:
            platform_res = await platform.fetch_new_post(sub_unit)
            for user, posts in platform_res:
                res[user].extend(posts)
        return [[key, val] for key, val in res.items()]

    # Assemble the concrete group class; abstract=True keeps it out of the
    # platform registry (only the caller decides how to register it).
    return type(
        "NoTargetGroup",
        (Platform,),
        {
            "platform_list": platform_list,
            "platform_name": platform_list[0].platform_name,
            "name": name,
            "categories": categories,
            "scheduler": scheduler,
            "is_common": platform_list[0].is_common,
            "enabled": True,
            "has_target": False,
            "enable_tag": False,
            "__init__": __init__,
            "get_target_name": get_target_name,
            "fetch_new_post": fetch_new_post,
        },
        abstract=True,
    )
diff --git a/src/plugins/nonebot_bison/platform/rss.py b/src/plugins/nonebot_bison/platform/rss.py
new file mode 100644
index 00000000..a7af5929
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/rss.py
@@ -0,0 +1,81 @@
+import time
+import calendar
+from typing import Any
+
+import feedparser
+from httpx import AsyncClient
+from bs4 import BeautifulSoup as bs
+
+from ..post import Post
+from .platform import NewMessage
+from ..types import Target, RawPost
+from ..utils import SchedulerConfig, text_similarity
+
+
class RssSchedConf(SchedulerConfig):
    """Scheduling for RSS feeds: poll on a 30-second interval."""

    name = "rss"
    schedule_type = "interval"
    schedule_setting = {"seconds": 30}
+
+
class Rss(NewMessage):
    """Generic RSS/Atom feed platform: the subscription target is the feed URL."""

    categories = {}
    enable_tag = False
    platform_name = "rss"
    name = "Rss"
    enabled = True
    is_common = True
    scheduler = RssSchedConf
    has_target = True

    @classmethod
    async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
        """Fetch the feed once and use its title as the display name."""
        res = await client.get(target, timeout=10.0)
        feed = feedparser.parse(res.text)
        return feed["feed"]["title"]

    def get_date(self, post: RawPost) -> int:
        """Entry timestamp as a UTC epoch, falling back to "now" when absent."""
        if hasattr(post, "published_parsed"):
            return calendar.timegm(post.published_parsed)
        elif hasattr(post, "updated_parsed"):
            return calendar.timegm(post.updated_parsed)
        else:
            # Entries without any timestamp are treated as just published.
            return calendar.timegm(time.gmtime())

    def get_id(self, post: RawPost) -> Any:
        # feedparser exposes the entry's guid/id attribute.
        return post.id

    async def get_sub_list(self, target: Target) -> list[RawPost]:
        """Download the feed and return its entries, tagging each with the feed title."""
        res = await self.client.get(target, timeout=10.0)
        # FIX: parse the response body rather than the httpx Response object —
        # feedparser only handled the Response by accident via its .read() method.
        feed = feedparser.parse(res.text)
        entries = feed.entries
        for entry in entries:
            # Stash the feed title so parse() can use it as the nickname.
            entry["_target_name"] = feed.feed.title
        return feed.entries

    def _text_process(self, title: str, desc: str) -> tuple[str | None, str]:
        """检查标题和描述是否相似,如果相似则标题为None, 否则返回标题和描述"""
        # An empty title or description counts as "similar" so only the
        # non-empty text is kept.
        similarity = 1.0 if len(title) == 0 or len(desc) == 0 else text_similarity(title, desc)
        if similarity > 0.8:
            return None, title if len(title) > len(desc) else desc

        return title, desc

    async def parse(self, raw_post: RawPost) -> Post:
        """Build a Post: strip HTML from the description, collect inline and
        media-content images, and drop a title that duplicates the body."""
        title = raw_post.get("title", "")
        soup = bs(raw_post.description, "html.parser")
        desc = soup.text.strip()
        title, desc = self._text_process(title, desc)
        pics = [x.attrs["src"] for x in soup("img")]
        if raw_post.get("media_content"):
            for media in raw_post["media_content"]:
                if media.get("medium") == "image" and media.get("url"):
                    pics.append(media.get("url"))
        return Post(
            self,
            desc,
            title=title,
            url=raw_post.link,
            images=pics,
            nickname=raw_post["_target_name"],
        )
diff --git a/src/plugins/nonebot_bison/platform/weibo.py b/src/plugins/nonebot_bison/platform/weibo.py
new file mode 100644
index 00000000..54c2a52a
--- /dev/null
+++ b/src/plugins/nonebot_bison/platform/weibo.py
@@ -0,0 +1,191 @@
+import re
+import json
+from typing import Any
+from datetime import datetime
+from urllib.parse import unquote
+
+from yarl import URL
+from lxml import etree
+from httpx import AsyncClient
+from nonebot.log import logger
+from bs4 import BeautifulSoup as bs
+
+from ..post import Post
+from .platform import NewMessage
+from ..utils import SchedulerConfig, http_client
+from ..types import Tag, Target, RawPost, ApiError, Category
+
+_HEADER = {
+ "accept": (
+ "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,"
+ "*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
+ ),
+ "accept-language": "zh-CN,zh;q=0.9",
+ "authority": "m.weibo.cn",
+ "cache-control": "max-age=0",
+ "sec-fetch-dest": "empty",
+ "sec-fetch-mode": "same-origin",
+ "sec-fetch-site": "same-origin",
+ "upgrade-insecure-requests": "1",
+ "user-agent": (
+ "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) "
+ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.72 "
+ "Mobile Safari/537.36"
+ ),
+}
+
+
class WeiboSchedConf(SchedulerConfig):
    """Scheduling for weibo: poll on a 3-second interval."""

    name = "weibo.com"
    schedule_type = "interval"
    schedule_setting = {"seconds": 3}
+
+
class Weibo(NewMessage):
    """新浪微博 adapter built on the m.weibo.cn mobile JSON API."""

    categories = {
        1: "转发",
        2: "视频",
        3: "图文",
        4: "文字",
    }
    enable_tag = True
    platform_name = "weibo"
    name = "新浪微博"
    enabled = True
    is_common = True
    scheduler = WeiboSchedConf
    has_target = True
    parse_target_promot = "请输入用户主页(包含数字UID)的链接"

    @classmethod
    async def get_target_name(cls, client: AsyncClient, target: Target) -> str | None:
        """Resolve a numeric UID to the user's screen name; None on failure."""
        param = {"containerid": "100505" + target}
        res = await client.get("https://m.weibo.cn/api/container/getIndex", params=param)
        res_dict = json.loads(res.text)
        if res_dict.get("ok") == 1:
            return res_dict["data"]["userInfo"]["screen_name"]
        else:
            return None

    @classmethod
    async def parse_target(cls, target_text: str) -> Target:
        """Accept a bare numeric UID or a weibo.com/u/<uid> profile link."""
        # NOTE(review): re.match anchors only at the start, so "123abc" also
        # passes as a UID — confirm whether fullmatch was intended.
        if re.match(r"\d+", target_text):
            return Target(target_text)
        elif match := re.match(r"(?:https?://)?weibo\.com/u/(\d+)", target_text):
            # Plain http should be extinct by now, but guard against it anyway.
            return Target(match.group(1))
        else:
            raise cls.ParseTargetException()

    async def get_sub_list(self, target: Target) -> list[RawPost]:
        """Fetch the user's timeline, keeping only real weibo cards (card_type 9)."""
        params = {"containerid": "107603" + target}
        res = await self.client.get("https://m.weibo.cn/api/container/getIndex?", params=params, timeout=4.0)
        res_data = json.loads(res.text)
        # "这里还没有内容" just means an empty timeline — not an API error.
        if not res_data["ok"] and res_data["msg"] != "这里还没有内容":
            raise ApiError(res.request.url)

        def custom_filter(d: RawPost) -> bool:
            return d["card_type"] == 9

        return list(filter(custom_filter, res_data["data"]["cards"]))

    def get_id(self, post: RawPost) -> Any:
        return post["mblog"]["id"]

    def filter_platform_custom(self, raw_post: RawPost) -> bool:
        # Only card_type 9 entries are actual weibo posts.
        return raw_post["card_type"] == 9

    def get_date(self, raw_post: RawPost) -> float:
        """Publication time as a UNIX timestamp."""
        created_time = datetime.strptime(raw_post["mblog"]["created_at"], "%a %b %d %H:%M:%S %z %Y")
        return created_time.timestamp()

    def get_tags(self, raw_post: RawPost) -> list[Tag] | None:
        "Return Tag list of given RawPost"
        text = raw_post["mblog"]["text"]
        soup = bs(text, "html.parser")
        # Hashtag topics are rendered as <span class="surl-text">#tag#</span>.
        res = [
            x[1:-1]
            for x in filter(
                lambda s: s[0] == "#" and s[-1] == "#",
                (x.text for x in soup.find_all("span", class_="surl-text")),
            )
        ]
        # 超话 (super-topic) entries carry a marker image instead of #...# text.
        super_topic_img = soup.find("img", src=re.compile(r"timeline_card_small_super_default"))
        if super_topic_img:
            try:
                res.append(super_topic_img.parent.parent.find("span", class_="surl-text").text + "超话")  # type: ignore
            except Exception:
                logger.info(f"super_topic extract error: {text}")
        return res

    def get_category(self, raw_post: RawPost) -> Category:
        """Classify a post: 1 repost, 2 video, 3 picture, 4 plain text."""
        if raw_post["mblog"].get("retweeted_status"):
            return Category(1)
        elif raw_post["mblog"].get("page_info") and raw_post["mblog"]["page_info"].get("type") == "video":
            return Category(2)
        elif raw_post["mblog"].get("pics"):
            return Category(3)
        else:
            return Category(4)

    def _get_text(self, raw_text: str) -> str:
        """Strip weibo HTML to plain text, keeping line breaks and expanding short links."""
        # FIX: the <br> replace targets had been corrupted into literal
        # newlines inside the string literals (a syntax error); restore the
        # intended tag -> "\n" normalization.
        text = raw_text.replace("<br/>", "\n").replace("<br />", "\n")
        selector = etree.HTML(text, parser=None)
        if selector is None:
            return text
        url_elems = selector.xpath("//a[@href]/span[@class='surl-text']")
        for br in selector.xpath("br"):
            # NOTE(review): assumes br.tail is a str; lxml yields None for a
            # <br> with no trailing text — confirm the upstream HTML shape.
            br.tail = "\n" + br.tail
        for elem in url_elems:
            url = elem.getparent().get("href")
            # Append the real URL after the anchor text for non-hashtag links
            # routed through weibo's redirect or video domains.
            if (
                not elem.text.startswith("#")
                and not elem.text.endswith("#")
                and (url.startswith("https://weibo.cn/sinaurl?u=") or url.startswith("https://video.weibo.com"))
            ):
                url = unquote(url.replace("https://weibo.cn/sinaurl?u=", ""))
                elem.text = f"{elem.text}( {url} )"
        return selector.xpath("string(.)")

    async def _get_long_weibo(self, weibo_id: str) -> dict:
        """Fetch the full detail of a truncated weibo; {} when it cannot be read."""
        try:
            weibo_info = await self.client.get(
                "https://m.weibo.cn/statuses/show",
                params={"id": weibo_id},
                headers=_HEADER,
            )
            weibo_info = weibo_info.json()
            if not weibo_info or weibo_info["ok"] != 1:
                return {}
            return weibo_info["data"]
        except (KeyError, TimeoutError):
            logger.info(f"detail message error: https://m.weibo.cn/detail/{weibo_id}")
            return {}

    async def _parse_weibo(self, info: dict) -> Post:
        """Build a Post from one mblog dict, downloading pictures and video cover."""
        if info["isLongText"] or info["pic_num"] > 9:
            # NOTE(review): if _get_long_weibo returns {}, this raises KeyError —
            # confirm whether it should fall back to the truncated text instead.
            info["text"] = (await self._get_long_weibo(info["mid"]))["text"]
        parsed_text = self._get_text(info["text"])
        raw_pics_list = info.get("pics", [])
        pic_urls = [img["large"]["url"] for img in raw_pics_list]
        # 视频cover
        if "page_info" in info and info["page_info"].get("type") == "video":
            crop_url = info["page_info"]["page_pic"]["url"]
            pic_urls.append(
                f"{URL(crop_url).scheme}://{URL(crop_url).host}/large/{info['page_info']['page_pic']['pid']}"
            )
        pics = []
        for pic_url in pic_urls:
            # The weibo image CDN requires a weibo.com referer.
            async with http_client(headers={"referer": "https://weibo.com"}) as client:
                res = await client.get(pic_url)
                res.raise_for_status()
                pics.append(res.content)
        detail_url = f"https://weibo.com/{info['user']['id']}/{info['bid']}"
        return Post(self, parsed_text, url=detail_url, images=pics, nickname=info["user"]["screen_name"])

    async def parse(self, raw_post: RawPost) -> Post:
        """Parse a timeline card; attach the reposted weibo when present."""
        info = raw_post["mblog"]
        post = await self._parse_weibo(info)
        if "retweeted_status" in info:
            post.repost = await self._parse_weibo(info["retweeted_status"])
        return post
diff --git a/src/plugins/nonebot_bison/plugin_config.py b/src/plugins/nonebot_bison/plugin_config.py
new file mode 100644
index 00000000..a0734eb3
--- /dev/null
+++ b/src/plugins/nonebot_bison/plugin_config.py
@@ -0,0 +1,43 @@
+import nonebot
+from yarl import URL
+from nonebot import get_plugin_config
+from pydantic import Field, BaseModel
+
# NoneBot driver-level config; provides the port for the default outer URL.
global_config = nonebot.get_driver().config
# Readability aliases for the keys/values of bison_platform_theme.
PlatformName = str
ThemeName = str
+
+
class PlugConfig(BaseModel):
    """All ``bison_*`` settings, read from NoneBot's environment config."""

    # Legacy subscribe-file directory; empty string means the default location.
    bison_config_path: str = ""
    bison_use_pic: bool = Field(
        default=False,
        description="发送消息时将所有文本转换为图片,防止风控,仅需要推送文转图可以为 platform 指定 theme",
    )
    bison_theme_use_browser: bool = Field(default=False, description="是否允许主题使用浏览器")
    # On a target's first fetch, mark existing posts as seen instead of pushing them.
    bison_init_filter: bool = True
    # Send messages through the rate-limited queue in send.py.
    bison_use_queue: bool = True
    # Externally reachable base URL for the admin page (see outer_url below).
    bison_outer_url: str = ""
    bison_filter_log: bool = False
    bison_to_me: bool = True
    bison_skip_browser_check: bool = False
    bison_use_pic_merge: int = 0  # enable merge-forwarding when there are multiple pictures (groups only)
    # 0: disabled; 1: first message sent alone, remaining pictures merge-forwarded;
    # 2 and above: all messages merge-forwarded
    bison_resend_times: int = 0  # retry count used by the send queue
    bison_proxy: str | None = None
    bison_ua: str = Field(
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
        description="默认UA",
    )
    bison_show_network_warning: bool = True
    # platform_name -> theme_name overrides for rendering.
    bison_platform_theme: dict[PlatformName, ThemeName] = {}

    @property
    def outer_url(self) -> URL:
        """Admin-page base URL: explicit config, else localhost on the bot's port."""
        if self.bison_outer_url:
            return URL(self.bison_outer_url)
        else:
            return URL(f"http://localhost:{global_config.port}/bison/")
+
+
# Singleton plugin configuration, validated against PlugConfig at import time.
plugin_config = get_plugin_config(PlugConfig)
diff --git a/src/plugins/nonebot_bison/post/__init__.py b/src/plugins/nonebot_bison/post/__init__.py
new file mode 100644
index 00000000..0cc05146
--- /dev/null
+++ b/src/plugins/nonebot_bison/post/__init__.py
@@ -0,0 +1 @@
+from .post import Post as Post
diff --git a/src/plugins/nonebot_bison/post/abstract_post.py b/src/plugins/nonebot_bison/post/abstract_post.py
new file mode 100644
index 00000000..2a76a44d
--- /dev/null
+++ b/src/plugins/nonebot_bison/post/abstract_post.py
@@ -0,0 +1,51 @@
+from dataclasses import dataclass
+from abc import ABC, abstractmethod
+
+from nonebot_plugin_saa import Text, MessageFactory, MessageSegmentFactory
+
+from ..utils import text_to_image
+from ..plugin_config import plugin_config
+
+
@dataclass(kw_only=True)
class AbstractPost(ABC):
    """Base class for sendable posts: turns generated message segments into
    ready-to-send MessageFactory lists, applying global post-processing."""

    # When True, pack all segments into one combined message.
    compress: bool = False
    # Extra messages appended after the main ones.
    extra_msg: list[MessageFactory] | None = None

    @abstractmethod
    async def generate(self) -> list[MessageSegmentFactory]:
        "Generate MessageSegmentFactory list from this instance"
        ...

    async def generate_messages(self) -> list[MessageFactory]:
        "really call to generate messages"
        msg_segments = await self.generate()
        msg_segments = await self.message_segments_process(msg_segments)
        msgs = await self.message_process(msg_segments)
        return msgs

    async def message_segments_process(self, msg_segments: list[MessageSegmentFactory]) -> list[MessageSegmentFactory]:
        "generate message segments and process them"

        async def convert(msg: MessageSegmentFactory) -> MessageSegmentFactory:
            # Render plain text to an image when the anti-risk-control
            # text-to-picture option is enabled.
            if isinstance(msg, Text):
                return await text_to_image(msg)
            else:
                return msg

        if plugin_config.bison_use_pic:
            return [await convert(msg) for msg in msg_segments]

        return msg_segments

    async def message_process(self, msg_segments: list[MessageSegmentFactory]) -> list[MessageFactory]:
        "generate messages and process them"
        # compress=True -> one combined message; otherwise one message per segment.
        if self.compress:
            msgs = [MessageFactory(msg_segments)]
        else:
            msgs = [MessageFactory(msg_segment) for msg_segment in msg_segments]

        if self.extra_msg:
            msgs.extend(self.extra_msg)

        return msgs
diff --git a/src/plugins/nonebot_bison/post/post.py b/src/plugins/nonebot_bison/post/post.py
new file mode 100644
index 00000000..5e68925c
--- /dev/null
+++ b/src/plugins/nonebot_bison/post/post.py
@@ -0,0 +1,109 @@
+import reprlib
+from io import BytesIO
+from pathlib import Path
+from typing import TYPE_CHECKING
+from dataclasses import fields, dataclass
+
+from nonebot.log import logger
+from nonebot_plugin_saa import MessageSegmentFactory
+
+from ..theme import theme_manager
+from .abstract_post import AbstractPost
+from ..plugin_config import plugin_config
+from ..theme.types import ThemeRenderError, ThemeRenderUnsupportError
+
+if TYPE_CHECKING:
+ from ..platform import Platform
+
+
@dataclass
class Post(AbstractPost):
    """The most generic Post, carrying all the commonly used fields.

    For more specialized needs, consider implementing another Post type.
    """

    platform: "Platform"
    """Source platform"""
    content: str
    """Text content"""
    title: str | None = None
    """Title"""
    images: list[str | bytes | Path | BytesIO] | None = None
    """List of images"""
    timestamp: int | None = None
    """Publish/fetch timestamp"""
    url: str | None = None
    """Source link"""
    avatar: str | bytes | Path | BytesIO | None = None
    """Poster's avatar"""
    nickname: str | None = None
    """Poster's nickname"""
    description: str | None = None
    """Poster's bio/signature etc."""
    repost: "Post | None" = None
    """The reposted Post"""

    def get_config_theme(self) -> str | None:
        """Return the user-configured theme for this post's platform, if any."""
        return plugin_config.bison_platform_theme.get(self.platform.platform_name)

    def get_priority_themes(self) -> list[str]:
        """Return candidate theme names for rendering, ordered by priority."""
        themes_by_priority: list[str] = []
        # User-configured theme first.
        if user_theme := self.get_config_theme():
            themes_by_priority.append(user_theme)
        # Then the platform's default theme.
        if self.platform.default_theme not in themes_by_priority:
            themes_by_priority.append(self.platform.default_theme)
        # Finally the most basic theme as the last resort.
        if "basic" not in themes_by_priority:
            themes_by_priority.append("basic")
        return themes_by_priority

    async def generate(self) -> list[MessageSegmentFactory]:
        """Render this post with the first theme (by priority) that succeeds."""
        themes = self.get_priority_themes()
        for theme_name in themes:
            if theme := theme_manager[theme_name]:
                try:
                    logger.debug(f"Try to render Post with theme {theme_name}")
                    return await theme.do_render(self)
                except ThemeRenderUnsupportError as e:
                    logger.warning(
                        f"Theme {theme_name} does not support Post of {self.platform.__class__.__name__}: {e}"
                    )
                    continue
                except ThemeRenderError as e:
                    logger.exception(f"Theme {theme_name} render error: {e}")
                    continue
            else:
                logger.error(f"Theme {theme_name} not found")
                continue
        else:
            # Every candidate theme failed or was missing.
            raise ThemeRenderError(f"No theme can render Post of {self.platform.__class__.__name__}")

    def __str__(self) -> str:
        """Human-readable debug dump of this post (used in send logs)."""
        aRepr = reprlib.Repr()
        aRepr.maxstring = 100  # truncate long values in the attribute dump

        # NOTE(review): the "来源:" line renders with no value — the source
        # info appears truncated here; confirm against the intended template.
        post_format = f"""## Post: {id(self):X} ##

{self.content if len(self.content) < 200 else self.content[:200] + '...'}

来源:
"""
        post_format += "附加信息:\n"
        for field in fields(self):
            if field.name in ("content", "platform", "repost"):
                continue
            value = getattr(self, field.name)
            if value is not None:
                post_format += f"- {field.name}: {aRepr.repr(value)}\n"

        if self.repost:
            post_format += "\n转发:\n"
            post_format += str(self.repost)

        return post_format
diff --git a/src/plugins/nonebot_bison/scheduler/__init__.py b/src/plugins/nonebot_bison/scheduler/__init__.py
new file mode 100644
index 00000000..19c9284c
--- /dev/null
+++ b/src/plugins/nonebot_bison/scheduler/__init__.py
@@ -0,0 +1,3 @@
+from .manager import init_scheduler, scheduler_dict, handle_delete_target, handle_insert_new_target
+
+__all__ = ["init_scheduler", "handle_delete_target", "handle_insert_new_target", "scheduler_dict"]
diff --git a/src/plugins/nonebot_bison/scheduler/manager.py b/src/plugins/nonebot_bison/scheduler/manager.py
new file mode 100644
index 00000000..b2ab3cc3
--- /dev/null
+++ b/src/plugins/nonebot_bison/scheduler/manager.py
@@ -0,0 +1,50 @@
+from ..config import config
+from .scheduler import Scheduler
+from ..utils import SchedulerConfig
+from ..config.db_model import Target
+from ..types import Target as T_Target
+from ..platform import platform_manager
+
+scheduler_dict: dict[type[SchedulerConfig], Scheduler] = {}
+
+
async def init_scheduler():
    """Build one Scheduler per SchedulerConfig from the platforms' stored
    targets and register hooks so runtime target changes update them."""
    # scheduler_config -> all targets it must poll
    _schedule_class_dict: dict[type[SchedulerConfig], list[Target]] = {}
    # scheduler_config -> names of the platforms it serves
    _schedule_class_platform_dict: dict[type[SchedulerConfig], list[str]] = {}
    for platform in platform_manager.values():
        scheduler_config = platform.scheduler
        if not hasattr(scheduler_config, "name") or not scheduler_config.name:
            # Give anonymous configs a deterministic name for logging/registry.
            scheduler_config.name = f"AnonymousScheduleConfig[{platform.platform_name}]"

        platform_name = platform.platform_name
        targets = await config.get_platform_target(platform_name)
        if scheduler_config not in _schedule_class_dict:
            _schedule_class_dict[scheduler_config] = list(targets)
        else:
            _schedule_class_dict[scheduler_config].extend(targets)
        if scheduler_config not in _schedule_class_platform_dict:
            _schedule_class_platform_dict[scheduler_config] = [platform_name]
        else:
            _schedule_class_platform_dict[scheduler_config].append(platform_name)
    for scheduler_config, target_list in _schedule_class_dict.items():
        schedulable_args = []
        for target in target_list:
            schedulable_args.append(
                (target.platform_name, T_Target(target.target), platform_manager[target.platform_name].use_batch)
            )
        platform_name_list = _schedule_class_platform_dict[scheduler_config]
        scheduler_dict[scheduler_config] = Scheduler(scheduler_config, schedulable_args, platform_name_list)
    # Keep schedulers in sync with subscription changes made at runtime.
    config.register_add_target_hook(handle_insert_new_target)
    config.register_delete_target_hook(handle_delete_target)
+
+
async def handle_insert_new_target(platform_name: str, target: T_Target):
    """Config hook: register a newly subscribed target with its platform's scheduler."""
    owning_scheduler = scheduler_dict[platform_manager[platform_name].scheduler]
    owning_scheduler.insert_new_schedulable(platform_name, target)
+
+
async def handle_delete_target(platform_name: str, target: T_Target):
    """Config hook: remove an unsubscribed target from its platform's scheduler."""
    owning_scheduler = scheduler_dict[platform_manager[platform_name].scheduler]
    owning_scheduler.delete_schedulable(platform_name, target)
diff --git a/src/plugins/nonebot_bison/scheduler/scheduler.py b/src/plugins/nonebot_bison/scheduler/scheduler.py
new file mode 100644
index 00000000..b1fc5302
--- /dev/null
+++ b/src/plugins/nonebot_bison/scheduler/scheduler.py
@@ -0,0 +1,153 @@
+from dataclasses import dataclass
+from collections import defaultdict
+
+from nonebot.log import logger
+from nonebot_plugin_apscheduler import scheduler
+from nonebot_plugin_saa.utils.exceptions import NoBotFound
+
+from ..config import config
+from ..send import send_msgs
+from ..types import Target, SubUnit
+from ..platform import platform_manager
+from ..utils import ProcessContext, SchedulerConfig
+
+
@dataclass
class Schedulable:
    """One pollable unit: a (platform, target) pair plus its scheduling weight."""

    platform_name: str
    target: Target
    # Accumulated weight; the highest value is fetched next.
    current_weight: int
    # Whether the platform fetches sibling targets in one batched request.
    use_batch: bool = False
+
+
class Scheduler:
    """Drives periodic fetching for all targets sharing one SchedulerConfig,
    choosing the next target by a weighted round-robin."""

    schedulable_list: list[Schedulable]  # for load weight from db
    batch_api_target_cache: dict[str, dict[Target, list[Target]]]  # platform_name -> (target -> [target])
    batch_platform_name_targets_cache: dict[str, list[Target]]

    def __init__(
        self,
        scheduler_config: type[SchedulerConfig],
        schedulables: list[tuple[str, Target, bool]],  # [(platform_name, target, use_batch)]
        platform_name_list: list[str],
    ):
        """Build the schedulable list and caches, then register the APScheduler job."""
        self.name = scheduler_config.name
        if not scheduler_config:
            # Defensive guard; a class object is always truthy in practice.
            logger.error(f"scheduler config [{self.name}] not found, exiting")
            raise RuntimeError(f"{self.name} not found")
        self.scheduler_config = scheduler_config
        self.scheduler_config_obj = self.scheduler_config()

        self.schedulable_list = []
        self.batch_platform_name_targets_cache: dict[str, list[Target]] = defaultdict(list)
        for platform_name, target, use_batch in schedulables:
            if use_batch:
                self.batch_platform_name_targets_cache[platform_name].append(target)
            self.schedulable_list.append(
                Schedulable(platform_name=platform_name, target=target, current_weight=0, use_batch=use_batch)
            )
        self._refresh_batch_api_target_cache()

        self.platform_name_list = platform_name_list
        self.pre_weight_val = 0  # initial value of this round's added weight sum
        logger.info(
            f"register scheduler for {self.name} with "
            f"{self.scheduler_config.schedule_type} {self.scheduler_config.schedule_setting}"
        )
        scheduler.add_job(
            self.exec_fetch,
            self.scheduler_config.schedule_type,
            **self.scheduler_config.schedule_setting,
        )

    def _refresh_batch_api_target_cache(self):
        """Rebuild the target -> sibling-targets map used for batched fetches."""
        self.batch_api_target_cache = defaultdict(dict)
        for platform_name, targets in self.batch_platform_name_targets_cache.items():
            for target in targets:
                # Every target in a batch maps to the full batch list (shared ref).
                self.batch_api_target_cache[platform_name][target] = targets

    async def get_next_schedulable(self) -> Schedulable | None:
        """Weighted round-robin: add each unit's configured weight, pick the
        heaviest, then subtract this round's weight sum from the winner."""
        if not self.schedulable_list:
            return None
        cur_weight = await config.get_current_weight_val(self.platform_name_list)
        weight_sum = self.pre_weight_val
        self.pre_weight_val = 0
        cur_max_schedulable = None
        for schedulable in self.schedulable_list:
            schedulable.current_weight += cur_weight[f"{schedulable.platform_name}-{schedulable.target}"]
            weight_sum += cur_weight[f"{schedulable.platform_name}-{schedulable.target}"]
            if not cur_max_schedulable or cur_max_schedulable.current_weight < schedulable.current_weight:
                cur_max_schedulable = schedulable
        assert cur_max_schedulable
        cur_max_schedulable.current_weight -= weight_sum
        return cur_max_schedulable

    async def exec_fetch(self):
        """One tick: pick a target, fetch (batched or single), send the results."""
        context = ProcessContext()
        if not (schedulable := await self.get_next_schedulable()):
            return
        logger.trace(f"scheduler {self.name} fetching next target: [{schedulable.platform_name}]{schedulable.target}")

        client = await self.scheduler_config_obj.get_client(schedulable.target)
        context.register_to_client(client)

        try:
            platform_obj = platform_manager[schedulable.platform_name](context, client)
            if schedulable.use_batch:
                # Batch platforms fetch all sibling targets in one request.
                batch_targets = self.batch_api_target_cache[schedulable.platform_name][schedulable.target]
                sub_units = []
                for batch_target in batch_targets:
                    userinfo = await config.get_platform_target_subscribers(schedulable.platform_name, batch_target)
                    sub_units.append(SubUnit(batch_target, userinfo))
                to_send = await platform_obj.do_batch_fetch_new_post(sub_units)
            else:
                send_userinfo_list = await config.get_platform_target_subscribers(
                    schedulable.platform_name, schedulable.target
                )
                to_send = await platform_obj.do_fetch_new_post(SubUnit(schedulable.target, send_userinfo_list))
        except Exception as err:
            # Attach the request trace to the exception for upstream diagnostics.
            records = context.gen_req_records()
            for record in records:
                logger.warning("API request record: " + record)
            err.args += (records,)
            raise

        if not to_send:
            return

        for user, send_list in to_send:
            for send_post in send_list:
                logger.info(f"send to {user}: {send_post}")
                try:
                    await send_msgs(
                        user,
                        await send_post.generate_messages(),
                    )
                except NoBotFound:
                    logger.warning("no bot connected")

    def insert_new_schedulable(self, platform_name: str, target: Target):
        """Add a target mid-run; a high initial weight gets it polled soon."""
        self.pre_weight_val += 1000
        self.schedulable_list.append(Schedulable(platform_name, target, 1000))

        if platform_manager[platform_name].use_batch:
            self.batch_platform_name_targets_cache[platform_name].append(target)
            self._refresh_batch_api_target_cache()

        # NOTE(review): "Schduler" typo in the log message below — fix separately.
        logger.info(f"insert [{platform_name}]{target} to Schduler({self.scheduler_config.name})")

    def delete_schedulable(self, platform_name, target: Target):
        """Remove a target and roll its accumulated weight out of the round sum."""
        if platform_manager[platform_name].use_batch:
            self.batch_platform_name_targets_cache[platform_name].remove(target)
            self._refresh_batch_api_target_cache()

        if not self.schedulable_list:
            return
        to_find_idx = None
        for idx, schedulable in enumerate(self.schedulable_list):
            if schedulable.platform_name == platform_name and schedulable.target == target:
                to_find_idx = idx
                break
        if to_find_idx is not None:
            deleted_schdulable = self.schedulable_list.pop(to_find_idx)
            self.pre_weight_val -= deleted_schdulable.current_weight
diff --git a/src/plugins/nonebot_bison/script/__init__.py b/src/plugins/nonebot_bison/script/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/plugins/nonebot_bison/script/cli.py b/src/plugins/nonebot_bison/script/cli.py
new file mode 100644
index 00000000..291c102f
--- /dev/null
+++ b/src/plugins/nonebot_bison/script/cli.py
@@ -0,0 +1,160 @@
+import json
+import time
+import importlib
+from pathlib import Path
+from types import ModuleType
+from typing import Any, TypeVar
+from functools import wraps, partial
+from collections.abc import Callable, Coroutine
+
+from nonebot.log import logger
+from nonebot.compat import model_dump
+
+from ..scheduler.manager import init_scheduler
+from ..config.subs_io.nbesf_model import v1, v2
+from ..config.subs_io import subscribes_export, subscribes_import
+
+try:
+ from typing_extensions import ParamSpec
+
+ import anyio
+ import click
+except ImportError as e: # pragma: no cover
+ raise ImportError("请使用 `pip install nonebot-bison[cli]` 安装所需依赖") from e
+
+
def import_yaml_module() -> ModuleType:
    """Import and return PyYAML, raising a helpful hint when it is missing."""
    try:
        return importlib.import_module("yaml")
    except ImportError as e:
        raise ImportError("请使用 `pip install nonebot-bison[yaml]` 安装所需依赖") from e
+
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
def run_sync(func: Callable[P, R]) -> Callable[P, Coroutine[Any, Any, R]]:
    """Wrap a blocking callable so that awaiting it runs it in a worker thread."""

    @wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        return await anyio.to_thread.run_sync(partial(func, *args, **kwargs))

    return wrapper
+
+
def run_async(func: Callable[P, Coroutine[Any, Any, R]]) -> Callable[P, R]:
    """Wrap a coroutine function so it can be called synchronously from a
    worker thread (the inverse of run_sync)."""

    @wraps(func)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        return anyio.from_thread.run(partial(func, *args, **kwargs))

    return wrapper
+
+
# Root click group; subcommands are attached via @cli.command below.
@click.group()
def cli():
    """Nonebot Bison CLI"""
    pass
+
+
def path_init(ctx, param, value):
    """Click callback: normalize the --path option, defaulting to the CWD."""
    return Path(value) if value else Path.cwd()
+
+
@cli.command(help="导出Nonebot Bison Exchangable Subcribes File", name="export")
@click.option("--path", "-p", default=None, callback=path_init, help="导出路径, 如果不指定,则默认为工作目录")
@click.option(
    "--format",
    default="json",
    type=click.Choice(["json", "yaml", "yml"]),
    help="指定导出格式[json, yaml],默认为 json",
)
@run_async
async def subs_export(path: Path, format: str):
    """Export all subscriptions to a timestamped NBESF v2 file (JSON or YAML)."""
    await init_scheduler()

    export_file = path / f"bison_subscribes_export_{int(time.time())}.{format}"

    logger.info("正在获取订阅信息...")
    export_data: v2.SubGroup = await subscribes_export(lambda x: x)

    with export_file.open("w", encoding="utf-8") as f:
        match format:
            case "yaml" | "yml":
                logger.info("正在导出为yaml...")

                pyyaml = import_yaml_module()
                # NBESF v2's user_target uses AllSupportedPlatformTarget, so the
                # model cannot go through safe_dump directly.
                # From https://pyyaml.org/wiki/PyYAMLDocumentation:
                # safe_dump(data, stream=None) serializes the given Python object into the stream.
                # If stream is None, it returns the produced stream.
                # safe_dump produces only standard YAML tags and cannot represent an arbitrary Python object.
                # Workaround: round-trip through JSON so only plain types remain.
                json_data = json.dumps(model_dump(export_data), ensure_ascii=False)
                yaml_data = pyyaml.safe_load(json_data)
                pyyaml.safe_dump(yaml_data, f, sort_keys=False)

            case "json":
                logger.info("正在导出为json...")

                json.dump(model_dump(export_data), f, indent=4, ensure_ascii=False)

            case _:
                # Unreachable in practice: click.Choice already restricts values.
                raise click.BadParameter(message=f"不支持的导出格式: {format}")

    logger.success(f"导出完毕!已导出到 {path} ")
+
+
# NOTE(review): "Biosn" typo in the help text below — runtime string, fix separately.
@cli.command(help="从Nonebot Biosn Exchangable Subscribes File导入订阅", name="import")
@click.option("--path", "-p", required=True, help="导入文件名")
@click.option(
    "--format",
    default="json",
    type=click.Choice(["json", "yaml", "yml"]),
    help="指定导入格式[json, yaml],默认为 json",
)
@run_async
async def subs_import(path: str, format: str):
    """Import subscriptions from an NBESF file, dispatching on its version field."""
    await init_scheduler()

    import_file_path = Path(path)
    # NOTE(review): assert is stripped under `python -O`; consider an explicit raise.
    assert import_file_path.is_file(), "该路径不是文件!"

    with import_file_path.open("r", encoding="utf-8") as f:
        match format:
            case "yaml" | "yml":
                logger.info("正在从yaml导入...")

                pyyaml = import_yaml_module()
                import_items = pyyaml.safe_load(f)

            case "json":
                logger.info("正在从json导入...")

                import_items = json.load(f)

            case _:
                # Unreachable in practice: click.Choice already restricts values.
                raise click.BadParameter(message=f"不支持的导入格式: {format}")

    assert isinstance(import_items, dict)
    ver = int(import_items.get("version", 0))
    logger.info(f"NBESF版本: {ver}")
    # Pick the parser matching the file's declared NBESF version.
    match ver:
        case 1:
            nbesf_data = v1.nbesf_parser(import_items)
        case 2:
            nbesf_data = v2.nbesf_parser(import_items)
        case _:
            raise NotImplementedError("不支持的NBESF版本")

    await subscribes_import(nbesf_data)
+
+
def main():
    """Console entry point: run the click CLI inside an AnyIO event loop."""
    anyio.run(run_sync(cli))  # pragma: no cover
diff --git a/src/plugins/nonebot_bison/send.py b/src/plugins/nonebot_bison/send.py
new file mode 100644
index 00000000..b48127da
--- /dev/null
+++ b/src/plugins/nonebot_bison/send.py
@@ -0,0 +1,80 @@
+import asyncio
+from collections import deque
+
+from nonebot.log import logger
+from nonebot_plugin_saa.auto_select_bot import refresh_bots
+from nonebot.adapters.onebot.v11.exception import ActionFailed
+from nonebot_plugin_saa import MessageFactory, PlatformTarget, AggregatedMessageFactory
+
+from .plugin_config import plugin_config
+
# A sendable unit: either a single message or an aggregated (forward) message.
Sendable = MessageFactory | AggregatedMessageFactory

# FIFO of pending sends: (target, message, remaining retry count).
QUEUE: deque[tuple[PlatformTarget, Sendable, int]] = deque()

# Delay between consecutive sends, in seconds.
# NOTE(review): name is misspelled ("MESSGE") but kept as-is — it may be
# imported by other modules; renaming would be an interface change.
MESSGE_SEND_INTERVAL = 1.5
+
+
async def _do_send(send_target: PlatformTarget, msg: Sendable):
    """Send *msg* to *send_target*; on OneBot ActionFailed, refresh the bots.

    NOTE(review): refresh_bots() presumably rebuilds saa's target→bot mapping
    so a later retry can pick a working bot — confirm against saa docs.
    """
    try:
        await msg.send_to(send_target)
    except ActionFailed:  # TODO: catch exception of other adapters
        await refresh_bots()
        logger.warning("send msg failed, refresh bots")
+
+
async def do_send_msgs():
    """Queue consumer: send queued messages one by one with a fixed interval.

    Exactly one instance runs at a time (started by _send_msgs_dispatch when
    the queue transitions from empty to non-empty) and it returns as soon as
    the queue drains.  A message that raises is re-queued at the front with
    one fewer retry; when retries are exhausted, the error is logged with a
    truncated preview of the message.
    """
    if not QUEUE:
        return
    while True:
        # why read from queue then pop item from queue?
        # if there is only 1 item in queue, pop it and await send
        # the length of queue will be 0.
        # At that time, adding items to queue will trigger a new execution of this func, which is not expected.
        # So, read from queue first then pop from it
        send_target, msg_factory, retry_time = QUEUE[0]
        try:
            await _do_send(send_target, msg_factory)
        except Exception as e:
            await asyncio.sleep(MESSGE_SEND_INTERVAL)
            QUEUE.popleft()
            if retry_time > 0:
                # Put the failed message back at the front with one fewer retry.
                QUEUE.appendleft((send_target, msg_factory, retry_time - 1))
            else:
                # Retries exhausted: log a truncated preview and drop it.
                msg_str = str(msg_factory)
                if len(msg_str) > 50:
                    msg_str = msg_str[:50] + "..."
                logger.warning(f"send msg err {e} {msg_str}")
        else:
            # sleeping after popping may also cause re-execution error like above mentioned
            await asyncio.sleep(MESSGE_SEND_INTERVAL)
            QUEUE.popleft()
        finally:
            # Queue drained: this consumer exits; the next append starts a new one.
            if not QUEUE:
                return
+
+
async def _send_msgs_dispatch(send_target: PlatformTarget, msg: Sendable):
    """Send *msg* directly, or enqueue it when the send queue is enabled."""
    if not plugin_config.bison_use_queue:
        await _do_send(send_target, msg)
        return
    QUEUE.append((send_target, msg, plugin_config.bison_resend_times))
    # The queue was empty before this append (length is now 1), so no consumer
    # task is running — start one.  A running consumer only exits after the
    # queue drains, so starting a second one here is never needed.
    if len(QUEUE) == 1:
        asyncio.create_task(do_send_msgs())
+
+
async def send_msgs(send_target: PlatformTarget, msgs: list[MessageFactory]):
    """Send *msgs* to *send_target*, optionally aggregating picture posts.

    Behavior by ``bison_use_pic_merge``:
      falsy  — every message is dispatched separately;
      == 1   — the first message is sent alone, the rest are aggregated;
      other  — all messages are aggregated.
    A single remaining message is never wrapped in an aggregate.
    """
    if not plugin_config.bison_use_pic_merge:
        for single in msgs:
            await _send_msgs_dispatch(send_target, single)
        return
    remaining = list(msgs)
    if plugin_config.bison_use_pic_merge == 1:
        await _send_msgs_dispatch(send_target, remaining.pop(0))
    if not remaining:
        return
    if len(remaining) == 1:
        # Only one message left — no point in a forward/aggregate message.
        await _send_msgs_dispatch(send_target, remaining.pop(0))
    else:
        await _send_msgs_dispatch(send_target, AggregatedMessageFactory(list(remaining)))
diff --git a/src/plugins/nonebot_bison/sub_manager/__init__.py b/src/plugins/nonebot_bison/sub_manager/__init__.py
new file mode 100644
index 00000000..9fea02bb
--- /dev/null
+++ b/src/plugins/nonebot_bison/sub_manager/__init__.py
@@ -0,0 +1,120 @@
+import asyncio
+from datetime import datetime
+
+from nonebot import on_command
+from nonebot.typing import T_State
+from nonebot.matcher import Matcher
+from nonebot.rule import Rule, to_me
+from nonebot.permission import SUPERUSER
+from nonebot_plugin_saa import TargetQQGroup
+from nonebot.params import ArgStr, ArgPlainText
+from nonebot.adapters import Bot, MessageTemplate
+from nonebot.adapters.onebot.v11.event import PrivateMessageEvent
+
+from .add_sub import do_add_sub
+from .del_sub import do_del_sub
+from .query_sub import do_query_sub
+from .utils import common_platform, admin_permission, gen_handle_cancel, configurable_to_me, set_target_user_info
+
# --- Matcher wiring --------------------------------------------------------
# "添加订阅" (add subscription): admin-only; the first handler resolves the
# target chat from the event, then do_add_sub attaches the dialog steps.
add_sub_matcher = on_command(
    "添加订阅",
    rule=configurable_to_me,
    permission=admin_permission(),
    priority=5,
    block=True,
)
add_sub_matcher.handle()(set_target_user_info)
do_add_sub(add_sub_matcher)


# "查询订阅" (query subscriptions): no special permission required.
query_sub_matcher = on_command("查询订阅", rule=configurable_to_me, priority=5, block=True)
query_sub_matcher.handle()(set_target_user_info)
do_query_sub(query_sub_matcher)


# "删除订阅" (delete subscription): admin-only, same wiring as add.
del_sub_matcher = on_command(
    "删除订阅",
    rule=configurable_to_me,
    permission=admin_permission(),
    priority=5,
    block=True,
)
del_sub_matcher.handle()(set_target_user_info)
do_del_sub(del_sub_matcher)

# "群管理" (group management): superuser-only, must be addressed to the bot;
# lets a superuser manage any group's subscriptions from a private chat.
group_manage_matcher = on_command("群管理", rule=to_me(), permission=SUPERUSER, priority=4, block=True)

group_handle_cancel = gen_handle_cancel(group_manage_matcher, "已取消")
+
+
@group_manage_matcher.handle()
async def send_group_list(bot: Bot, event: PrivateMessageEvent, state: T_State):
    """List every group the bot is in and prompt the superuser to pick one."""
    groups = await bot.call_api("get_group_list")
    # serial -> group_id mapping, reused by the next step to resolve the choice.
    group_number_idx = {serial: info["group_id"] for serial, info in enumerate(groups, 1)}
    listing = "".join(
        f'{serial}. {info["group_id"]} - {info["group_name"]}\n' for serial, info in enumerate(groups, 1)
    )
    state["_prompt"] = "请选择需要管理的群:\n" + listing + "请输入左侧序号\n中止操作请输入'取消'"
    state["group_number_idx"] = group_number_idx
+
+
@group_manage_matcher.got("group_idx", MessageTemplate("{_prompt}"), [group_handle_cancel])
async def do_choose_group_number(state: T_State, event: PrivateMessageEvent, group_idx: str = ArgPlainText()):
    """Resolve the chosen serial number and store the target group.

    Re-prompts when the reply is not a number (previously an unhandled
    ValueError) or not one of the listed serials.  The redundant re-reads of
    ``state`` that followed the assignment were dead code and are removed.
    """
    group_number_idx: dict[int, int] = state["group_number_idx"]
    assert group_number_idx
    try:
        idx = int(group_idx)
    except ValueError:
        await group_manage_matcher.reject("请输入正确序号")
    if idx not in group_number_idx:
        await group_manage_matcher.reject("请输入正确序号")
    state["group_idx"] = idx
    state["target_user_info"] = TargetQQGroup(group_id=group_number_idx[idx])
+
+
@group_manage_matcher.got("command", "请输入需要使用的命令:添加订阅,查询订阅,删除订阅,取消", [group_handle_cancel])
async def do_dispatch_command(
    bot: Bot,
    event: PrivateMessageEvent,
    state: T_State,
    matcher: Matcher,
    command: str = ArgStr(),
):
    """Dispatch the chosen sub-command onto a freshly built temporary matcher.

    A one-shot matcher is created that inherits this session's permission
    (via update_permission) and state, so the add/query/del dialog can run
    against the selected group while the conversation continues in this
    private chat.
    """
    if command not in {"添加订阅", "查询订阅", "删除订阅", "取消"}:
        await group_manage_matcher.reject("请输入正确的命令")
    permission = await matcher.update_permission(bot, event)
    new_matcher = Matcher.new(
        "message",
        Rule(),
        permission,
        handlers=None,
        temp=True,  # one-shot: destroyed after a single session
        priority=0,
        block=True,
        plugin=matcher.plugin,
        module=matcher.module,
        expire_time=datetime.now(),
        default_state=matcher.state,
        default_type_updater=matcher.__class__._default_type_updater,
        default_permission_updater=matcher.__class__._default_permission_updater,
    )
    # Attach the handlers of the requested sub-dialog to the temp matcher.
    if command == "查询订阅":
        do_query_sub(new_matcher)
    elif command == "添加订阅":
        do_add_sub(new_matcher)
    else:
        do_del_sub(new_matcher)
    new_matcher_ins = new_matcher()
    # Run the temp matcher in the background with the current event/state.
    asyncio.create_task(new_matcher_ins.run(bot, event, state))
+
+
# Public API of the sub_manager package.
__all__ = [
    "common_platform",
    "add_sub_matcher",
    "query_sub_matcher",
    "del_sub_matcher",
    "group_manage_matcher",
]
diff --git a/src/plugins/nonebot_bison/sub_manager/add_sub.py b/src/plugins/nonebot_bison/sub_manager/add_sub.py
new file mode 100644
index 00000000..7bf7f1e4
--- /dev/null
+++ b/src/plugins/nonebot_bison/sub_manager/add_sub.py
@@ -0,0 +1,156 @@
+import contextlib
+
+from nonebot.typing import T_State
+from nonebot.matcher import Matcher
+from nonebot.params import Arg, ArgPlainText
+from nonebot.adapters import Message, MessageTemplate
+from nonebot_plugin_saa import Text, PlatformTarget, SupportedAdapters
+
+from ..types import Target
+from ..config import config
+from ..apis import check_sub_target
+from ..platform import Platform, platform_manager
+from ..config.db_config import SubscribeDupException
+from .utils import common_platform, ensure_user_info, gen_handle_cancel
+
+
def do_add_sub(add_sub: type[Matcher]):
    """Attach the multi-step "add subscription" dialog to *add_sub*.

    Steps: platform -> target id -> categories -> tags -> persist.
    Each ``got`` step re-prompts on invalid input; replying "取消" cancels
    the whole dialog via the shared cancel dependency.
    """
    handle_cancel = gen_handle_cancel(add_sub, "已中止订阅")

    add_sub.handle()(ensure_user_info(add_sub))

    # Step 0: build the platform-selection prompt from the common platforms.
    @add_sub.handle()
    async def init_promote(state: T_State):
        state["_prompt"] = (
            "请输入想要订阅的平台,目前支持,请输入冒号左边的名称:\n"
            + "".join(
                [f"{platform_name}: {platform_manager[platform_name].name}\n" for platform_name in common_platform]
            )
            + "要查看全部平台请输入:“全部”\n中止订阅过程请输入:“取消”"
        )

    # Step 1: validate the chosen platform; "全部" lists every platform and
    # re-prompts.
    @add_sub.got("platform", MessageTemplate("{_prompt}"), [handle_cancel])
    async def parse_platform(state: T_State, platform: str = ArgPlainText()) -> None:
        if platform == "全部":
            message = "全部平台\n" + "\n".join(
                [f"{platform_name}: {platform.name}" for platform_name, platform in platform_manager.items()]
            )
            await add_sub.reject(message)
        elif platform == "取消":
            await add_sub.finish("已中止订阅")
        elif platform in platform_manager:
            state["platform"] = platform
        else:
            await add_sub.reject("平台输入错误")

    # Step 2a: prompt for a target id, or fill in defaults for platforms
    # without per-user targets (raw_id is pre-set so the got step is skipped).
    @add_sub.handle()
    async def prepare_get_id(matcher: Matcher, state: T_State):
        cur_platform = platform_manager[state["platform"]]
        if cur_platform.has_target:
            state["_prompt"] = (
                ("1." + cur_platform.parse_target_promot + "\n2.") if cur_platform.parse_target_promot else ""
            ) + "请输入订阅用户的id\n查询id获取方法请回复:“查询”"
        else:
            matcher.set_arg("raw_id", None)  # type: ignore
            state["id"] = "default"
            state["name"] = await check_sub_target(state["platform"], Target(""))

    # Step 2b: parse and validate the id; replying "查询" re-prompts with a
    # help link (upgraded to a share card on OneBot v11).
    @add_sub.got("raw_id", MessageTemplate("{_prompt}"), [handle_cancel])
    async def got_id(state: T_State, raw_id: Message = Arg()):
        raw_id_text = raw_id.extract_plain_text()
        try:
            if raw_id_text == "查询":
                url = "https://nonebot-bison.netlify.app/usage/#%E6%89%80%E6%94%AF%E6%8C%81%E5%B9%B3%E5%8F%B0%E7%9A%84-uid"
                msg = Text(url)
                with contextlib.suppress(ImportError):
                    from nonebot.adapters.onebot.v11 import MessageSegment

                    title = "Bison所支持的平台UID"
                    content = "查询相关平台的uid格式或获取方式"
                    image = "https://s3.bmp.ovh/imgs/2022/03/ab3cc45d83bd3dd3.jpg"
                    msg.overwrite(
                        SupportedAdapters.onebot_v11,
                        MessageSegment.share(url=url, title=title, content=content, image=image),
                    )
                await msg.reject()
            platform = platform_manager[state["platform"]]
            # Undo OneBot v11 HTML entity escaping before parsing the id.
            with contextlib.suppress(ImportError):
                from nonebot.adapters.onebot.v11 import Message
                from nonebot.adapters.onebot.v11.utils import unescape

                if isinstance(raw_id, Message):
                    raw_id_text = unescape(raw_id_text)
            raw_id_text = await platform.parse_target(raw_id_text)
            name = await check_sub_target(state["platform"], raw_id_text)
            if not name:
                await add_sub.reject("id输入错误")
            state["id"] = raw_id_text
            state["name"] = name
        except Platform.ParseTargetException:
            await add_sub.reject("不能从你的输入中提取出id,请检查你输入的内容是否符合预期")
        else:
            await add_sub.send(
                f"即将订阅的用户为:{state['platform']} {state['name']} {state['id']}\n如有错误请输入“取消”重新订阅"
            )

    # Step 3a: skip the category step for platforms without categories.
    @add_sub.handle()
    async def prepare_get_categories(matcher: Matcher, state: T_State):
        if not platform_manager[state["platform"]].categories:
            matcher.set_arg("raw_cats", None)  # type: ignore
            state["cats"] = []
            return
        state["_prompt"] = "请输入要订阅的类别,以空格分隔,支持的类别有:{}".format(
            " ".join(list(platform_manager[state["platform"]].categories.values()))
        )

    # Step 3b: map category display names back to ids; unknown names re-prompt.
    @add_sub.got("raw_cats", MessageTemplate("{_prompt}"), [handle_cancel])
    async def parser_cats(state: T_State, raw_cats: Message = Arg()):
        raw_cats_text = raw_cats.extract_plain_text()
        res = []
        if platform_manager[state["platform"]].categories:
            for cat in raw_cats_text.split():
                if cat not in platform_manager[state["platform"]].reverse_category:
                    await add_sub.reject(f"不支持 {cat}")
                res.append(platform_manager[state["platform"]].reverse_category[cat])
        state["cats"] = res

    # Step 4a: skip the tag step for platforms without tag support.
    @add_sub.handle()
    async def prepare_get_tags(matcher: Matcher, state: T_State):
        if not platform_manager[state["platform"]].enable_tag:
            matcher.set_arg("raw_tags", None)  # type: ignore
            state["tags"] = []
            return
        state["_prompt"] = (
            '请输入要订阅/屏蔽的标签(不含#号)\n多个标签请使用空格隔开\n订阅所有标签输入"全部标签"\n具体规则回复"详情"'
        )

    # Step 4b: parse tags; "详情" re-prompts with the tag-rule help text and
    # "全部标签" variants mean "subscribe all" (empty tag list).
    @add_sub.got("raw_tags", MessageTemplate("{_prompt}"), [handle_cancel])
    async def parser_tags(state: T_State, raw_tags: Message = Arg()):
        raw_tags_text = raw_tags.extract_plain_text()
        if raw_tags_text == "详情":
            await add_sub.reject(
                "订阅标签直接输入标签内容\n"
                "屏蔽标签请在标签名称前添加~号\n"
                "详见https://nonebot-bison.netlify.app/usage/#%E5%B9%B3%E5%8F%B0%E8%AE%A2%E9%98%85%E6%A0%87%E7%AD%BE-tag"
            )
        if raw_tags_text in ["全部标签", "全部", "全标签"]:
            state["tags"] = []
        else:
            state["tags"] = raw_tags_text.split()

    # Step 5: persist the subscription; duplicates and other errors finish
    # the dialog with an error message.
    @add_sub.handle()
    async def add_sub_process(state: T_State, user: PlatformTarget = Arg("target_user_info")):
        try:
            await config.add_subscribe(
                user=user,
                target=state["id"],
                target_name=state["name"],
                platform_name=state["platform"],
                cats=state.get("cats", []),
                tags=state.get("tags", []),
            )
        except SubscribeDupException:
            await add_sub.finish(f"添加 {state['name']} 失败: 已存在该订阅")
        except Exception as e:
            await add_sub.finish(f"添加 {state['name']} 失败: {e}")
        await add_sub.finish("添加 {} 成功".format(state["name"]))
diff --git a/src/plugins/nonebot_bison/sub_manager/del_sub.py b/src/plugins/nonebot_bison/sub_manager/del_sub.py
new file mode 100644
index 00000000..c1003c28
--- /dev/null
+++ b/src/plugins/nonebot_bison/sub_manager/del_sub.py
@@ -0,0 +1,52 @@
+from nonebot.typing import T_State
+from nonebot.matcher import Matcher
+from nonebot.params import Arg, EventPlainText
+from nonebot_plugin_saa import MessageFactory, PlatformTarget
+
+from ..config import config
+from ..types import Category
+from ..utils import parse_text
+from ..platform import platform_manager
+from .utils import ensure_user_info, gen_handle_cancel
+
+
def do_del_sub(del_sub: type[Matcher]):
    """Attach the "delete subscription" dialog handlers to *del_sub*."""
    handle_cancel = gen_handle_cancel(del_sub, "删除中止")

    del_sub.handle()(ensure_user_info(del_sub))

    @del_sub.handle()
    async def send_list(state: T_State, user_info: PlatformTarget = Arg("target_user_info")):
        # Build a numbered listing of current subscriptions and remember the
        # index -> (platform_name, target) mapping for the deletion step.
        sub_list = await config.list_subscribe(user_info)
        if not sub_list:
            await del_sub.finish("暂无已订阅账号\n请使用“添加订阅”命令添加订阅")
        res = "订阅的帐号为:\n"
        state["sub_table"] = {}
        for index, sub in enumerate(sub_list, 1):
            state["sub_table"][index] = {
                "platform_name": sub.target.platform_name,
                "target": sub.target.target,
            }
            # NOTE(review): the trailing "\n" here places the category/tag
            # suffixes on their own line, unlike query_sub which appends them
            # before the newline — confirm whether this is intentional.
            res += f"{index} {sub.target.platform_name} {sub.target.target_name} {sub.target.target}\n"
            platform = platform_manager[sub.target.platform_name]
            if platform.categories:
                res += " [{}]".format(", ".join(platform.categories[Category(x)] for x in sub.categories))
            if platform.enable_tag:
                res += " {}".format(", ".join(sub.tags))
            res += "\n"
        res += "请输入要删除的订阅的序号\n输入'取消'中止"
        await MessageFactory(await parse_text(res)).send()

    @del_sub.receive(parameterless=[handle_cancel])
    async def do_del(
        state: T_State,
        index_str: str = EventPlainText(),
        user_info: PlatformTarget = Arg("target_user_info"),
    ):
        # Any failure (non-numeric reply, unknown index, db error) re-prompts.
        try:
            index = int(index_str)
            await config.del_subscribe(user_info, **state["sub_table"][index])
        except Exception:
            await del_sub.reject("删除错误")
        else:
            await del_sub.finish("删除成功")
diff --git a/src/plugins/nonebot_bison/sub_manager/query_sub.py b/src/plugins/nonebot_bison/sub_manager/query_sub.py
new file mode 100644
index 00000000..5bce8122
--- /dev/null
+++ b/src/plugins/nonebot_bison/sub_manager/query_sub.py
@@ -0,0 +1,28 @@
+from nonebot.params import Arg
+from nonebot.matcher import Matcher
+from nonebot_plugin_saa import MessageFactory, PlatformTarget
+
+from ..config import config
+from ..types import Category
+from ..utils import parse_text
+from .utils import ensure_user_info
+from ..platform import platform_manager
+
+
def do_query_sub(query_sub: type[Matcher]):
    """Attach the "query subscriptions" handlers to *query_sub*."""
    query_sub.handle()(ensure_user_info(query_sub))

    @query_sub.handle()
    async def _(user_info: PlatformTarget = Arg("target_user_info")):
        sub_list = await config.list_subscribe(user_info)
        lines = ["订阅的帐号为:\n"]
        for sub in sub_list:
            platform = platform_manager[sub.target.platform_name]
            entry = f"{sub.target.platform_name} {sub.target.target_name} {sub.target.target}"
            if platform.categories:
                entry += " [{}]".format(", ".join(platform.categories[Category(cat)] for cat in sub.categories))
            if platform.enable_tag:
                entry += " {}".format(", ".join(sub.tags))
            lines.append(entry + "\n")
        await MessageFactory(await parse_text("".join(lines))).send()
        await query_sub.finish()
diff --git a/src/plugins/nonebot_bison/sub_manager/utils.py b/src/plugins/nonebot_bison/sub_manager/utils.py
new file mode 100644
index 00000000..d0694967
--- /dev/null
+++ b/src/plugins/nonebot_bison/sub_manager/utils.py
@@ -0,0 +1,62 @@
+import contextlib
+from typing import Annotated
+
+from nonebot.rule import Rule
+from nonebot.adapters import Event
+from nonebot.typing import T_State
+from nonebot.matcher import Matcher
+from nonebot.permission import SUPERUSER
+from nonebot_plugin_saa import extract_target
+from nonebot.params import Depends, EventToMe, EventPlainText
+
+from ..platform import platform_manager
+from ..plugin_config import plugin_config
+
+
def _configurable_to_me(to_me: bool = EventToMe()):
    """Rule body: require addressing the bot only when bison_to_me is enabled."""
    return to_me if plugin_config.bison_to_me else True


configurable_to_me = Rule(_configurable_to_me)
+
# Names of the enabled, commonly-used platforms shown in the add-sub prompt.
common_platform = [
    p.platform_name
    for p in platform_manager.values()
    if p.enabled and p.is_common
]
+
+
def gen_handle_cancel(matcher: type[Matcher], message: str):
    """Build a parameterless dependency that finishes *matcher* with *message*
    whenever the user replies "取消" (cancel)."""

    async def _handle_cancel(text: Annotated[str, EventPlainText()]):
        if text == "取消":
            await matcher.finish(message)

    return Depends(_handle_cancel)
+
+
def ensure_user_info(matcher: type[Matcher]):
    """Build a handler that aborts *matcher* when no target_user_info is in
    state (it should have been set earlier, e.g. by set_target_user_info)."""

    async def _check_user_info(state: T_State):
        if not state.get("target_user_info"):
            await matcher.finish("No target_user_info set, this shouldn't happen, please issue")

    return _check_user_info
+
+
async def set_target_user_info(event: Event, state: T_State):
    """Resolve the chat this event came from (via saa extract_target) and
    store it in state as the subscription target."""
    user = extract_target(event)
    state["target_user_info"] = user
+
+
def admin_permission():
    """Return the permission for subscription-management commands.

    Superusers always qualify; when the OneBot v11 adapter is importable,
    group admins and owners qualify as well.
    """
    try:
        from nonebot.adapters.onebot.v11.permission import GROUP_ADMIN, GROUP_OWNER
    except ImportError:
        return SUPERUSER
    return SUPERUSER | GROUP_ADMIN | GROUP_OWNER
diff --git a/src/plugins/nonebot_bison/theme/__init__.py b/src/plugins/nonebot_bison/theme/__init__.py
new file mode 100644
index 00000000..fef5cb17
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/__init__.py
@@ -0,0 +1,22 @@
+from pathlib import Path
+from pkgutil import iter_modules
+from importlib import import_module
+
+from .types import Theme
+from .registry import theme_manager
+from .types import ThemeRegistrationError
+from .types import ThemeRenderError as ThemeRenderError
+from .types import ThemeRenderUnsupportError as ThemeRenderUnsupportError
+
_theme_dir = str((Path(__file__).parent / "themes").resolve())

# Auto-discover every sub-package under ./themes and register its theme.
for _, theme, _ in iter_modules([_theme_dir]):
    theme_module = import_module(f"{__name__}.themes.{theme}")

    # Every theme package must expose a module-level `__theme_meta__` ...
    if not hasattr(theme_module, "__theme_meta__"):
        raise ThemeRegistrationError(f"{theme} has no __theme_meta__")

    # ... and it must be a Theme instance (the message previously named
    # "AbstractTheme", which does not match the isinstance check below).
    if not isinstance(theme_module.__theme_meta__, Theme):
        raise ThemeRegistrationError(f"{theme}'s __theme_meta__ is not a Theme instance")

    theme_manager.register(theme_module.__theme_meta__)
diff --git a/src/plugins/nonebot_bison/theme/registry.py b/src/plugins/nonebot_bison/theme/registry.py
new file mode 100644
index 00000000..009d4321
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/registry.py
@@ -0,0 +1,36 @@
+from nonebot import logger
+
+from ..plugin_config import plugin_config
+from .types import Theme, ThemeRegistrationError
+
+
class ThemeManager:
    """Registry of render themes, keyed by theme name."""

    # Class-level store: all ThemeManager instances share one registry
    # (the module exposes a single `theme_manager` instance below).
    __themes: dict[str, Theme] = {}

    def register(self, theme: Theme):
        """Register *theme*; raise ThemeRegistrationError on a duplicate name.

        Only warns (does not fail) when the theme needs a browser but
        bison_theme_use_browser is disabled.
        """
        logger.trace(f"Registering theme: {theme}")
        if theme.name in self.__themes:
            raise ThemeRegistrationError(f"Theme {theme.name} duplicated registration")
        if theme.need_browser and not plugin_config.bison_theme_use_browser:
            logger.opt(colors=True).warning(f"Theme {theme.name} requires browser, but not allowed")
        self.__themes[theme.name] = theme
        logger.opt(colors=True).success(f"Theme {theme.name} registered")

    def unregister(self, theme_name: str):
        """Remove a registered theme; raise ThemeRegistrationError if absent."""
        logger.trace(f"Unregistering theme: {theme_name}")
        if theme_name not in self.__themes:
            raise ThemeRegistrationError(f"Theme {theme_name} was not registered")
        self.__themes.pop(theme_name)
        logger.opt(colors=True).success(f"Theme {theme_name} unregistered")

    def __getitem__(self, theme: str):
        # Name lookup; raises KeyError for unknown themes.
        return self.__themes[theme]

    def __len__(self):
        return len(self.__themes)

    def __contains__(self, theme: str):
        return theme in self.__themes


# Module-level singleton used by the theme loader.
theme_manager = ThemeManager()
diff --git a/src/plugins/nonebot_bison/theme/themes/arknights/__init__.py b/src/plugins/nonebot_bison/theme/themes/arknights/__init__.py
new file mode 100644
index 00000000..ebbd30b7
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/arknights/__init__.py
@@ -0,0 +1,3 @@
# Theme package entry point: the loader picks up `__theme_meta__`.
from .build import ArknightsTheme

__theme_meta__ = ArknightsTheme()
diff --git a/src/plugins/nonebot_bison/theme/themes/arknights/build.py b/src/plugins/nonebot_bison/theme/themes/arknights/build.py
new file mode 100644
index 00000000..f126859e
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/arknights/build.py
@@ -0,0 +1,69 @@
+from pathlib import Path
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Literal
+
+from nonebot_plugin_saa import Text, Image, MessageSegmentFactory
+
+from nonebot_bison.theme import Theme, ThemeRenderError, ThemeRenderUnsupportError
+
+if TYPE_CHECKING:
+ from nonebot_bison.post import Post
+
+
@dataclass
class ArkData:
    """Template context for the Arknights announcement card."""

    announce_title: str  # headline shown in the card's title bar
    content: str  # announcement body rendered into the template
    banner_image_url: str | Path | None  # optional banner image (URL or local path)
+
+
class ArknightsTheme(Theme):
    """Arknights 公告风格主题

    需要安装`nonebot_plugin_htmlrender`插件
    """

    name: Literal["arknights"] = "arknights"
    need_browser: bool = True

    template_path: Path = Path(__file__).parent / "templates"
    template_name: str = "announce.html.jinja"

    async def render(self, post: "Post"):
        """Render *post* as an Arknights-style announcement picture.

        Returns the rendered image followed by a URL text segment when the
        post has a URL.

        Raises:
            ThemeRenderUnsupportError: missing title, more than one image, or
                an image that is not a str/Path.
            ThemeRenderError: the template rendering itself failed.
        """
        from nonebot_plugin_htmlrender import template_to_pic

        if not post.title:
            raise ThemeRenderUnsupportError("标题为空")
        if post.images and len(post.images) > 1:
            raise ThemeRenderUnsupportError("图片数量大于1")

        banner = post.images[0] if post.images else None

        if banner is not None and not isinstance(banner, str | Path):
            raise ThemeRenderUnsupportError(f"图片类型错误, 期望 str 或 Path, 实际为 {type(banner)}")

        ark_data = ArkData(
            announce_title=post.title,
            content=post.content,
            banner_image_url=banner,
        )

        try:
            announce_pic = await template_to_pic(
                template_path=self.template_path.as_posix(),
                template_name=self.template_name,
                templates={
                    "data": ark_data,
                },
                pages={
                    "viewport": {"width": 600, "height": 100},
                    "base_url": self.template_path.as_uri(),
                },
            )
        except Exception as e:
            # Chain the original exception so the render failure is debuggable.
            raise ThemeRenderError(f"渲染文本失败: {e}") from e
        msgs: list[MessageSegmentFactory] = []
        msgs.append(Image(announce_pic))
        if post.url:
            msgs.append(Text(f"前往:{post.url}"))
        # BUG FIX: previously returned [Image(announce_pic)], silently dropping
        # the URL Text segment appended just above.
        return msgs
diff --git a/src/plugins/nonebot_bison/theme/themes/arknights/templates/announce.html.jinja b/src/plugins/nonebot_bison/theme/themes/arknights/templates/announce.html.jinja
new file mode 100644
index 00000000..0e71394f
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/arknights/templates/announce.html.jinja
@@ -0,0 +1,31 @@
+
+
+
+
+
+
+
+ 公告
+
+
+
+
+
+
+ {% if data.banner_image_url %}
+
+
+
+ {% endif %}
+
+ {{ data.announce_title }}
+
+
{{ data.content }}
+
+
+
+
+
diff --git a/src/plugins/nonebot_bison/theme/themes/arknights/templates/style.css b/src/plugins/nonebot_bison/theme/themes/arknights/templates/style.css
new file mode 100644
index 00000000..89fb8bb4
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/arknights/templates/style.css
@@ -0,0 +1,107 @@
+/**
+ 引用自 https://ak.hycdn.cn/announce/assets/css/announcement.v_0_1_2.css
+**/
+@media screen and (max-device-width: 480px) {
+ body {
+ -webkit-text-size-adjust: 100%;
+ }
+}
+html {
+ height: 100%;
+}
+body,
+head {
+ margin: 0;
+ padding: 0;
+}
+body {
+ background-color: #313131;
+ min-height: 100%;
+ background-color: #d0d0cf;
+}
+.main {
+ max-width: 980px;
+ font-family: "Microsoft Yahei";
+ width: 100%;
+ margin: auto;
+ font-size: 1rem;
+ min-height: 100%;
+}
+.main .container {
+ min-height: 100%;
+}
+.main .container .standerd-container {
+ padding: 2.72727273%;
+ width: 94.54545455%;
+ margin: auto;
+}
+.main .container .standerd-container .banner-image-container {
+ margin-bottom: 0.8rem;
+}
+.main .container .standerd-container .banner-image-container .banner-image {
+ display: block;
+ width: 100%;
+}
+.main .container .standerd-container .head-title-container {
+ margin: 0;
+ background-image: url(
+ https://ak.hycdn.cn/announce/assets/images/announcement/header.jpg);
+ background-size: cover;
+ position: relative;
+ margin-bottom: 0.6rem;
+}
+.main .container .standerd-container .head-title-container::before {
+ content: "";
+ display: block;
+ width: 100%;
+ padding-top: 6.02564103%;
+}
+.main .container .standerd-container .head-title-container .head-title {
+ padding-left: 0.25rem;
+ color: #fff;
+ font-weight: 500;
+ overflow: hidden;
+ position: absolute;
+ top: 0;
+ width: 100%;
+ height: 100%;
+ box-sizing: border-box;
+ display: flex;
+ justify-content: center;
+ flex-direction: column;
+ font-size: 1rem;
+}
+.main .container .standerd-container .content {
+ line-height: 0.8rem;
+ font-size: 0.6rem;
+}
+.main .container .standerd-container .content h4 {
+ font-size: 110%;
+ margin-block-start: 0.5rem;
+ margin-block-end: 0.5rem;
+}
+.main .container .standerd-container .content p {
+ margin-block-start: 0.25rem;
+ margin-block-end: 0.25rem;
+ min-height: 0.8rem;
+}
+.main .container .standerd-container .content img {
+ max-width: 100%;
+ margin: auto;
+ display: block;
+}
+.main .container .banner-image-container.cover {
+ width: 100%;
+ height: 100%;
+ position: absolute;
+ overflow: hidden;
+}
+.main .container .banner-image-container.cover .cover-jumper {
+ width: 100%;
+ height: 100%;
+ display: block;
+}
+.main .container .banner-image-container.cover .banner-image {
+ width: 100%;
+ height: 100%;
+}
diff --git a/src/plugins/nonebot_bison/theme/themes/basic/__init__.py b/src/plugins/nonebot_bison/theme/themes/basic/__init__.py
new file mode 100644
index 00000000..e54ff7d3
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/basic/__init__.py
@@ -0,0 +1,3 @@
# Theme package entry point: the loader picks up `__theme_meta__`.
from .build import BasicTheme

__theme_meta__ = BasicTheme()
diff --git a/src/plugins/nonebot_bison/theme/themes/basic/build.py b/src/plugins/nonebot_bison/theme/themes/basic/build.py
new file mode 100644
index 00000000..93ed4402
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/basic/build.py
@@ -0,0 +1,52 @@
+from typing import TYPE_CHECKING, Literal
+
+from nonebot_plugin_saa import Text, Image, MessageSegmentFactory
+
+from nonebot_bison.theme import Theme
+from nonebot_bison.utils import pic_merge, is_pics_mergable
+
+if TYPE_CHECKING:
+ from nonebot_bison.post import Post
+
+
class BasicTheme(Theme):
    """Plain-text fallback theme.

    Pure text output; every Post is expected to support this theme.
    """

    name: Literal["basic"] = "basic"

    async def render(self, post: "Post") -> list[MessageSegmentFactory]:
        """Render *post* as one text segment followed by its images."""

        def clip(body: str) -> str:
            # Truncate overly long bodies to 500 characters.
            return body if len(body) < 500 else f"{body[:500]}..."

        rp = post.repost
        parts: list[str] = []
        if post.title:
            parts.append(f"{post.title}\n\n")
        parts.append(clip(post.content))

        if rp:
            parts.append(f"\n--------------\n转发自 {rp.nickname or ''}:\n")
            if rp.title:
                parts.append(f"{rp.title}\n\n")
            parts.append(clip(rp.content))

        parts.append("\n--------------\n")
        parts.append(f"来源: {post.platform.name} {post.nickname or ''}\n")

        urls: list[str] = []
        if rp and rp.url:
            urls.append(f"转发详情:{rp.url}")
        if post.url:
            urls.append(f"详情: {post.url}")
        # "\n".join of an empty list is "", so the unconditional append is safe.
        parts.append("\n".join(urls))

        msgs: list[MessageSegmentFactory] = [Text("".join(parts))]
        if post.images:
            pics = post.images
            if is_pics_mergable(pics):
                pics = await pic_merge(list(pics), post.platform.client)
            msgs.extend(Image(pic) for pic in pics)

        return msgs
diff --git a/src/plugins/nonebot_bison/theme/themes/brief/__init__.py b/src/plugins/nonebot_bison/theme/themes/brief/__init__.py
new file mode 100644
index 00000000..01633e24
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/brief/__init__.py
@@ -0,0 +1,3 @@
# Theme package entry point: the loader picks up `__theme_meta__`.
from .build import BriefTheme

__theme_meta__ = BriefTheme()
diff --git a/src/plugins/nonebot_bison/theme/themes/brief/build.py b/src/plugins/nonebot_bison/theme/themes/brief/build.py
new file mode 100644
index 00000000..2e9bf63b
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/brief/build.py
@@ -0,0 +1,39 @@
+from typing import TYPE_CHECKING, Literal
+
+from nonebot_plugin_saa import Text, Image, MessageSegmentFactory
+
+from nonebot_bison.utils import pic_merge, is_pics_mergable
+from nonebot_bison.theme import Theme, ThemeRenderUnsupportError
+
+if TYPE_CHECKING:
+ from nonebot_bison.post import Post
+
+
class BriefTheme(Theme):
    """简报主题,只发送标题、头图(如果有)、URL(如果有)"""

    name: Literal["brief"] = "brief"

    async def render(self, post: "Post") -> list[MessageSegmentFactory]:
        """Render a short summary of *post*; a title is required."""
        if not post.title:
            raise ThemeRenderUnsupportError("Post has no title")

        rp = post.repost
        pieces = [
            f"{post.title}\n\n",
            f"来源: {post.platform.name} {post.nickname or ''}{' 的转发' if post.repost else ''}\n",
        ]

        urls: list[str] = []
        if rp and rp.url:
            urls.append(f"转发详情: {rp.url}")
        if post.url:
            urls.append(f"详情: {post.url}")
        # Joining an empty list yields "", preserving the no-URL case.
        pieces.append("\n".join(urls))

        msgs: list[MessageSegmentFactory] = [Text("".join(pieces))]
        if post.images:
            pics = post.images
            if is_pics_mergable(pics):
                pics = await pic_merge(list(pics), post.platform.client)
            msgs.append(Image(pics[0]))

        return msgs
diff --git a/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/README.md b/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/README.md
new file mode 100644
index 00000000..d40fb1ab
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/README.md
@@ -0,0 +1,10 @@
+# Jinja模版与LOGO图片说明
+
+## LOGO图片
+
+- `templates/ceobecanteen_logo.png`
+
+### 版权声明
+
+
logo 图片采用知识共享署名-非商业性使用-相同方式共享 4.0 国际许可协议进行许可。
本项目已获得 [Ceobe Canteen](https://github.com/Enraged-Dun-Cookie-Development-Team) 的授权许可使用。
diff --git a/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/__init__.py b/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/__init__.py
new file mode 100644
index 00000000..dfd5f634
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/__init__.py
@@ -0,0 +1,3 @@
# Theme package entry point: the loader picks up `__theme_meta__`.
from .build import CeobeCanteenTheme

__theme_meta__ = CeobeCanteenTheme()
diff --git a/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/build.py b/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/build.py
new file mode 100644
index 00000000..7e66374d
--- /dev/null
+++ b/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/build.py
@@ -0,0 +1,114 @@
+from pathlib import Path
+from datetime import datetime
+from typing import TYPE_CHECKING, Literal
+
+import jinja2
+from pydantic import BaseModel
+from nonebot_plugin_saa import Text, Image, MessageSegmentFactory
+
+from nonebot_bison.compat import model_validator
+from nonebot_bison.theme.utils import convert_to_qr
+from nonebot_bison.theme import Theme, ThemeRenderError, ThemeRenderUnsupportError
+
+if TYPE_CHECKING:
+ from nonebot_bison.post import Post
+
+
class CeobeInfo(BaseModel):
    """Info section of the card.

    datasource: name of the data source the post came from

    time: publish time, pre-formatted by the caller
    """

    # Data-source name rendered into the card template.
    datasource: str
    # Publish time string; the caller formats it as "%Y-%m-%d %H:%M:%S".
    time: str
+
+
class CeoboContent(BaseModel):
    """Content section of the card.

    image: URL of the head picture, if any
    text: text content, if any

    At least one of the two must be provided (enforced by ``check``).
    """

    # Defaults make the "| None" fields genuinely optional (under pydantic v2
    # an annotation without a default is still required, which contradicted
    # the validator below).
    image: str | None = None
    text: str | None = None

    @model_validator(mode="before")
    def check(cls, values):
        """Reject payloads where both ``image`` and ``text`` are absent/None.

        Uses ``.get`` so an omitted key counts as None and raises the intended
        ValueError instead of an opaque KeyError during validation.
        """
        if values.get("image") is None and values.get("text") is None:
            raise ValueError("image and text cannot be both None")
        return values
+
+
class CeobeCard(BaseModel):
    """Complete data bundle rendered into the ceobe-canteen card template."""

    # Header info: data-source name plus formatted time.
    info: CeobeInfo
    # Body: head image and/or text (at least one present, per CeoboContent).
    content: CeoboContent
    # QR payload built from the post URL via convert_to_qr; the type allows None.
    qr: str | None
+
+
class CeobeCanteenTheme(Theme):
    """Ceobe Canteen share-card style theme.

    Requires the `nonebot_plugin_htmlrender` plugin to be installed.
    """

    name: Literal["ceobecanteen"] = "ceobecanteen"
    need_browser: bool = True

    # Directory holding the jinja template and its static assets (logo images).
    template_path: Path = Path(__file__).parent / "templates"
    template_name: str = "ceobe_canteen.html.jinja"

    def parse(self, post: "Post") -> CeobeCard:
        """Parse a Post into a CeobeCard.

        Raises:
            ThemeRenderUnsupportError: when the post lacks a nickname or a
                timestamp, or its head image is not a plain URL string.
        """
        if not post.nickname:
            raise ThemeRenderUnsupportError("post.nickname is None")
        if not post.timestamp:
            raise ThemeRenderUnsupportError("post.timestamp is None")
        info = CeobeInfo(
            datasource=post.nickname, time=datetime.fromtimestamp(post.timestamp).strftime("%Y-%m-%d %H:%M:%S")
        )

        # Only the first image is used as the card's head picture.
        head_pic = post.images[0] if post.images else None
        if head_pic is not None and not isinstance(head_pic, str):
            raise ThemeRenderUnsupportError("post.images[0] is not str")

        content = CeoboContent(image=head_pic, text=post.content)
        return CeobeCard(info=info, content=content, qr=convert_to_qr(post.url or "No URL"))

    async def render(self, post: "Post") -> list[MessageSegmentFactory]:
        """Render the post as a card screenshot plus a text summary.

        Returns the card image segment, a source/link text segment, and then
        every image attached to the post.

        Raises:
            ThemeRenderError: when the browser rendering/screenshot step fails.
        """
        ceobe_card = self.parse(post)
        # Imported lazily: htmlrender is an optional dependency (see class docstring).
        from nonebot_plugin_htmlrender import get_new_page

        template_env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(self.template_path),
            enable_async=True,
        )
        template = template_env.get_template(self.template_name)
        html = await template.render_async(card=ceobe_card)
        # base_url lets relative asset paths in the rendered HTML resolve on disk.
        pages = {
            "viewport": {"width": 1000, "height": 3000},
            "base_url": self.template_path.as_uri(),
        }
        try:
            async with get_new_page(**pages) as page:
                # NOTE(review): navigating to the template directory before
                # set_content looks intended to pin the page origin so relative
                # resources load — confirm against htmlrender/playwright docs.
                await page.goto(self.template_path.as_uri())
                await page.set_content(html)
                await page.wait_for_timeout(1)
                img_raw = await page.locator("#ceobecanteen-card").screenshot(
                    type="png",
                )
        except Exception as e:
            raise ThemeRenderError(f"Render error: {e}") from e
        msgs: list[MessageSegmentFactory] = [Image(img_raw)]

        text = f"来源: {post.platform.name} {post.nickname or ''}\n"
        if post.url:
            text += f"详情: {post.url}"
        msgs.append(Text(text))

        if post.images:
            msgs.extend(map(Image, post.images))
        return msgs
diff --git a/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/templates/bison_logo.jpg b/src/plugins/nonebot_bison/theme/themes/ceobe_canteen/templates/bison_logo.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9e9580011c8a64f7aee9d59795287bb209bde300
GIT binary patch
literal 108454
zcmdqJ2UwHcnlAiOBorYay#_=;snRK4ocGP9QC<#l$7UaK_DvX^R(yB)6vq=(KB3Nq-S7aprd1C
zV`O4xVP$1K4`%0JW8t{K!pic;n~+hEUPF0~=G-|N7J52*mjCPHtQ}yYrpTsH^uU=;t
zzAJiPT=L=Lr>g3j+PeBL4UL^$U%Pvd-@f+_kBp9uPfSitqtHvszkaW*uB~Ht_x2BP
zhxnu8KkOm{DE_xuq~HJBu)nj5iDVZ!B_#zV=nuQd$OA}&f{F5+fHc(wEi;h4H}gfA
zNNSdA&kHI$Xar?%VOSsd4AHV(l0yk$|1j;JEc=fc7WEHV_E*FH#ja`KJOvr);!!XG
zP~h}-;j_!Y|G$UWSETc6b89m*gG_
zbm>&hmfslRb%s^Vs@9!yA&$CtPc|Fghuxby$jHgu?(fiHv8Z!nG1CG5>xR*b>j|85
zV@$f1uNKL+UC2f)V&S(=Yg-xN!r9jMt-CWP8eiJ|?lL?B46>93L7uce;9FwtJ8hoO
zXW?wymqv*61W0SP$hTCBu>||nopT&XVUi0sfRlz-moKFO>qcJlviL2`9%{`A3rUu{
zFKQ~gY!sO5%{LTvk4WBXTz~os>ME6-Cf;^|C=Q`>9RZw%V6Pug!U&8ujVtDLSDBf>d__umJ>?cc|8vW@_@q8yYfC(tGLc
zfn+-0G)4Ga9OjU9pzB1RI0Cs~tAp|EaKxOx<3NQL?K6;F9KvLcXRfVrLY>_k&wQ%^7?VqZ1c
z;j7B~y6453vO?XXk#|DCw>QOpTQ5v^_2h9&l?=5~1{xvCU)ZzEON@^qojJZ!_LOip
z@)-=A2uG+lE!9Ugvo9^gW~W3hHLZ%vSq?G>c>8>=k<{3TeM|$Y{HERw*Ex|R+{LDL
zN+s!a@Ntd$LldFk#|Vu9Pn?+ZjAN3bc>FFOD{#~>t;L}(wz@+T*7GF0xL8dK>+rR{
zjftSSTp7FkYSUwHOfboxx5_(D3#PSd3mGOzm*|zOafQ~scH-!?p1xk0PXO}iDKF(;
zV$@#^x&5Iqg7)&?j|yKyd@?H3(`>X%?lib_cgKiwUgtb23Gkmwd6mzQ@~mSR9)c?O
zACuc*bj|8VlG!ySf7fW|n$IVHcKN?^Aeeb`oh&_c^mw0@+%UR-rC<8VnC;IJ_hQ8I
zk56MI*J$6nUhb~tu6Oz=w%fB)R<>FH&lDR@OBOgKW#zl
z_fRFg{}QAftUo7sIH1uJp^j^ALJw>yVkQ=AWWqadqZ6&&$CAD^dQ{5Jec^j9tk1U<
zxdXeb_aSWb45%%wo{A4+#7i$MKcqD9rWsI1>nb6QqI1VKV06&MTnN1ylRsZ6TK)^R
zzwPi>(-_Te?lt9bDUo#IkQ~_x7lOb7LbkC!o-x7;U=X**bH3jltyK-!O@I6Nkv&Uz
z^BCjl^Ds4d*gau<8&+mRXZSJu&pfT+_mtNSF6puYsT3*3OB26W@*4bFgpu0ShDIvA
zbaZz(?s>dpBCV3{`}yEpB4=1f8x@i3)noR~L44w!RM6J(;j#5L0PnE=1&aK}i3qK(
zsvp@Jrn@It7YW==VWIt`4)BTw{}ENu8WIF2Oi2fnb^w7#+)n8nBiluvjMyk13TpLn
z0bBRSSgNny<#}NYAYN;ThD{%PAmWQKdY$DmCh@t-aOAJaDGEQ}ip?
zIKxDVvzf!OmPB^AaGt<@8kNlahQs!0J#~eo1Gi&mlFw;xqax4`LaX3HMaT!K(dvEh
zy;;S>oB`noHAqKZC9Vl;d<6k!xr%W@Vh}
zP;gJ%p?Ipp-ibCQWqw+k#|5Ox;wAd){}_76ItYFkL_FLB^X_XfdyGG1v)t#H?7X6_
zgYLB~?opN9_vt>%KC(6on|mi3c8G=h8+bNO3{@X?&
z=Yl3u!)Vut;&0c3&EJKfV2t^~5+;V}t|G*2y+a$mqm@}xDv`u9z%lhHQSOzEZJ_RW
z|CaHuhT*x*S)LVW6F*BjFx-3D9#0iJI_~w(|&yC7-)+g964uB~YorkuPy0
z^c)b{pyL%z7p9DdED2}U&og7sRTP64>glTj`^ma=7p|&3+xhQE6aSIaEcO;6_{QBG
z+o=!h|Hg6uy(9m3{$A1$h1+AYI`Zg^D)3GBq7ADxp9t~}|Cq~^yHQ@-fvQ8
zH;L-$92ZknV!ULk8Og-r2QXx~MKA*Sz*P`HYxS3CB-S482AP)GLxska#Hz8EQgm4PYbklx!3Q$|gsONyj@5fU!G7(C!c}smf_o14mUL3&
zIqsT8fbv;|SWL-u{=WyM;u9~}bf1SmWq`u&M+1S^6oMbusnwWZihw$I=QI36>3-fx@%@$6s1?ve0`+HGJF`lwP{SvVzN>7`m
zsBkV+7*p7g)fo^)FadW#lM|@Y(a&Q>{0{^q?8+|UD$Y}(
z0@FoV@bg?d~VWp>j-_0Do<30JwuV`%e?Gn4k-OcGIjc&x7
zz2#PZ{GZHUBw3p^J+T>X6MuZ%+us-9Y+qA4{~{w~qRvg_Kaf<&GZjQ~|tY7CLZolgP_
zPEzA_Dq82->-VUox1}b0XVdzwxv8{~UXS1$U|p~6g0tf-Q43EY;F>muz=va7`>PHN
zw)Q7eZ&PfI^Q3pyMrs4({hTEuJ{$~g10r*i;
zo>ulQV)6`Fd|2ry872N=M#d}UT(0-QDNt#j!6HU!Otii0|Za7f3p03awlw>
zdCda*x)nlDNEX_)y*M}rV*?2_Uf^I_h#&ZfsAW0F!2@;!tp0D@^JrCR}j(>-HgpJAT3Qx4CS`XuG%J3kl
zFDa7=jHZ30@7UDUoDKHQRYsIOkN)7qAN}oOLuvwqz6Sa%m$?OlYjcx)RB;Ct2c|m%
z&c}ybRf~ox!a4DYXp<=<(u^R8F9;RFB?k$RYB}p}<|{Meq4^odRI5}$py)KMd~k(J
z5hYWakGiW1P_`A`)d+i5^Cm~iYNXu%zQV5?My^4$eLT-XSnagRxh_+n+IYjdNLhj+
z`gODSi8?-ePtQ+6j>w$7e44v#mnRVbFQWm@r56X<-=ApTnMpOCkiXZ=jBA?*P#bu|MFj3(
zkMkgpb>5(K&PM-Th!_w5r4aJTON$MNB6qs>Fi2`=F705AOV)#XQ`(+M0e1jmVZFCofmaJ2J@LJov-ra<-PjLAHI8gQVjMhonNliO+`T?-L)
zU$By&xo>-)9>>$b
z9na#a>dKVqaY}KMx$CA-&2s=;NNTg|wGvH9;$;Kj|?abjJ{br
zwqIIwMZSFY+I9PV!4KE#EFbms5xUIohAt6HnVQ?kmreVh6{%d+z}LU
z5Sis=Mx)Rp{cpTCKA$hLi=K~AtH%YM780(U=mtuv2XtkrN*2=b6%^kH4|P&d)+J*0
zy*nWBh}(!54fU{}Vf?2#1YgCTfhhCsvg9^P%#dr8
zmZHjP{4%@o_S*+rpIY7+PZ+_zzrI^){FL>^uvwy;;^V$-i(knLmX=eGH#ZeBLWW7j
z2kxide*%6b+a&>iGZw^w)7O$)%{xM#wPG&!w)(GCrh^cNVsovOt6g1Zz&Qd~-qnrMVi`X;{$K#2qySwaLbnZecTWuId
z$?<0b-h%m$G_Kvy>haNBQE%e{j&XmM0kzV!vYi1;ZHpfirk@}h^$E(wr^WqhC~a}A
ztOeCqb;>-D+prLe_Jz9NZ$1zhPTvvv;Qk}!PU!~8FZ#?+R9;xS3c4Z>3m;O5K(6G2
zTY>ICsjVg~)k=FfnY{i!I=r@aBi>K-YMHj@+qAC^xr~Y?Za66rzTOZ!5nFQ_qT6Wr
zQ{h-ZPFA-)3ha>b)K9RIb+2S=!I}FW{K}}*-IV7JOLPX0?~z}7d-YmqD`ubt2#MNx)Y8kT1NbKjS`nR~qMS6@}1P@EZ{bUj&1
zC}9)eM7%tZN;m`9&j8nehG>+?R5SZx_miiArMKg2>LYmH7ufdA+BplpxJBTU4@2AC41MB{5h4344iPM0
zOv75#_qqe~QZ=~X6sI{>M%rlG`fw}^x!YJjx$`(FWkhu`4
zQcBrQ#nw*+&1)y*`i!pi!c_XVS#vqB>Mq-7D5<4@gO89X1cCVsKp~@xPCVnM57AU*
zrlmF;n|#u_O%4<@_(Y}+*bU3+ZYK}cW~MUJ>rqNeS`nY907yhW&4<6EO|Hqp_y|Uh
zr&-}{EnwtxwI>=>*h9jNgZ9!oC4{cpM{6F(75+0|!Z--=d4uE`-A!Uku_lX!nT8;-n0CkARTyJ^ntd6lrOjR|a_Faty>XdylXA
z+{6&Kz8|UvgtwPY{uQ?%
zbJu!Rroa8H`U*%+itg`0Iw0r#6P6@Q%!bvJL;Q=@jLO!j6fDIi$
zwmf8}_r8Zb$(DZ-Yx6kidGA~Aw7}AgJ;;DeT)pOtph43Y`*yw-
z!QRgp+v%i0K>&p%?~iVWl36YV1`L8uwu|01E9#`r@Hd;!3#&@~#L@@#`?Od|f
z{fyWBtMRuR*gYh$QsLb+$m44ROjsw{gn-M4ti4Svc>$G!M9}JMIpEqkp?H8%@|H
z+kg3@NLz9}dMrikC3ZUUoEn
zF|y_U^-`c=#7UTY=E}=$^U0vz&;1sn-lNYJ2E(18v9_EIid@cd$z?ytn75PEJALbss;
zitUL(^IiipT6b|qsb(d*_pxhMN%7}2GtW2AEIAWIY=y_lTR91e&DEI4Cjvo12oxg-<;N&6Y9wYb3H%BO+p&q34EVx{)q;4|HW2ALX6DwiQ{z|`Nn
zfKP`*WtkSVi!vOOjKNIN=3;N~&(R!&E2Q~-&Hj>%2T&VR(9dwp&9&cW
zK+F}is=@h(s;~=lNTvkx3Wc8VygfvP*sle=2z%@qPzimw;l4BPU^%*tXiu|?Y}Sc<
zVJyp}c04h9=U+h-yX;Rjcoztqr%knqs}d`zoePX*#n~e9(LhTwd`deBvmo|aX~i3X
za2JqQ7rQ-vsgildR-adrvD@bdLt(c#T-v
zd9IANb>+D;qZQ#+M~XxmZ!}fhA~FF^<8xn5}ZVmcoYtDn={O$ote-#C%^BflXr
z7_?$UTKT1L+5wUDcW~VruJWkLr7eDT?@dE-JkCrL;`g^+)SNw~8DhV*19I2k8M|n?
zDX(fN;iS87ryClt#E-jJ?~)IGL*k3I%`t6H<=bL%Ma<)DF;06k+P}#)MjX!oHCf?c
zgm|;_cB<8}uEB-NQ*#+ix`xv26tfddOFx-4hx|Fj_LcK(^wu{nZ4xc@wVm`
z^JoBX$C@ofzAX{@Nd4Atw!AwS=Z?URd%x;_))HH_P-CfAK%6}AF1}{KzwN>q;IR_}
zV~|`K(XCWWIGAi))8LJm`cW(Cw3zeg(M3iI&VVN6mna%*EZRqCHfM!n?x
zl>`&xWAGx|v0cI}EQUw6vfM*@pu!m}T{q(yXOHYx(&vhx(rUpI{?CA;B~rKoX(+e_
zfPI&wl5{Ekk(15p#d;Td0@nnijFmUV=KYLh?u!`3j=LWnToL3@R8)l{kGFuZjId|G
zKs{zV!~KH|tfSbnE7UhtV_ntH(&-Eciug`&BMv3mJ;)@fN-B;Rxj(FIFrs|nv?#?IVp
zu4$S$IGIttALW|M9+P=dMt4mANRV@RMa{pQgdtYKz9#E^J(ktR)c070ZhXrEnUBd|
z-YSTXZt^x`^aAsEQaT=NlAYv@KWoR(*OfajZt*SL-=!*NtrBXloBMo~qy77Xu%fpI
z<;Q;&&baqfU9m82$JuB97#&{G8xp<`x>=vQ2HKXOi@zvt;zQQH?L|{=b(x|={tQSZ
z@o~OtFYtXR+rnd+$IfGF@=9&jZNKJ|wZ4yvysy{=o{mP#ph9amChZA)#{u^~89d=NH;S-7O=3~*ZC5XhErFt|wfVHr6;7sb8WzMl`a0|^Sg
zmw?~*g=}%2h$36*Ug8H9RUh^h4Y7`*64kak`tLYbpf<&3!zhVkwgCRrr4)5)YV9ui
zrg#WZ$vYajel2~EY`uZ>PJeNmh}R_0reI#tgqrv>c0D9&48JdsrE3e$-t2
z@B(t;MM}f5Vr^LbzoeVC0w-EwW34>+OXJh58D%8+IQpZ#TpP_A|94+iz9jrWX5JQr
z_+{@5nAXD$^RZ&@)^{$%u;GZ3_|lJ+QA&{u9>wVdU#8KNrVxkU$Qzr`hp;KzfyGUB
z4H2xwMqnvArT*m9`D;M9YTVg_iQTScPu5U&r}6ir&g|;o
z3R$^1(iANTo#<6J*4)ZIHezxx{Y3tO12_*{@&5k)Ux=ILXk5za2Q}Ukg~0p%5XRZs
z{*V4IM{3;78h`JaXuc~vu3Yq6P6L0~sNZu9mMjSJ^u+Zgh6D+vEIe6|&oH1%AGze9
zaj*;8EIHBmj^*yQXya)WwLv7n9m2&V-`I;XyS~4sM1JW}R4DZT&+-c+AaJ?ue4ER_
zOuB8fIrb4ndmfmlUpD4gL8f>79wls6_&BD*9FCkD0U7|y?=Xr+A@%I!0p|&
z?!yEq_g^=R#J^o>2{i^xNfOm|^2`XCeJMJZd5&spBgl>F5s9ow)*?DCqNe%#f6GXM
zc(H=0uTVz0(PV!k!|S6S#1O+%gaS=H19Vo=o{BZE}`jCuJ+21x7l~#aQwc*5gG!YPd{h~h4nVp
zBI1auF7;dkvG2WE+mijh5JoA+I#ax786nXTV>prMX7_?XQl(Zq-mLn`+U?k>m*!$a
zZJJD)ZaBqeP^m3H=64pl{Nl=ualeyL6XOPLp79B@IKA7WjP6TNPGY~U>S@No%m6Qe
zzVXYroZis^m};IaE&@
z!t^IN6s-*=hHziLHP4Y%SQr5iVgMHsW=KXqS7uRHZT%xk|GhT%I|fdD{%E{KZ(G$)
zG(qP?nV^du=-M*xTsJwl&i>8}^Sgi&d`T3dihf8z>s_BRW3Y4IB(lv-Swcf`1464I
zVA);Z{`*&d^as(Gbz|kn#fr=c($A9?(==D2_vHbJf1*ar!{g~hl1Gc13v;#APvKPC{4NGF%J0ApvnSECiEm?zkS^R{3
z7Kx$C!59%N6po~toj=UFR}DzfU;Hk}qO_^73sU1cHvbb5-M^!~_!}~iabM0fnCl
z{-o1NFMdBHrg21G)v+<^t}>~X)p%EM$hrcp4cI&?CJsYE@~Jd)MeZM16uCS`pV379
z(wYuh1&f9z!<|VB@5gC@JR~kxkfefhMnEX+OV=iDb(hej&Sn_fGK6~F3t1VDfEv1x
zkTfy0$&4@V%P3s>+)iXO!-(F;K9R`^e-4({d=BIDGrTqwd-a%QaI;RRCDs{v?AZm0
zJ4PnRTO%r`rmo%lmd`FymB0NJ11g8-oUp(JTk!>G?$l(W+^}GEeM7(>yrzDR;%PSd
z)QPe6r6&J{u>|LpixH}MlOZ^rIy?w9)vQ;3T^L(e7iC!gMA9SeZG0rOd$8JHxV7Z(
zaK1m6N;e}rVKw0q1P3hG8Y7=uAwe)H>V1&igFP=H0HW+MVNl-s2?YOz$fLe;{Y=JV`#qzu`iO=R*?o@r-uQzsMx
z*TzD+j@vq$-L&-e22x)NtPGYFQQ}vyBHSBSj
z16DER^6qw4T^4od5Yn}odU<$zVscNDdx0v9J`lDT7q!)_igLeHS9{Ab6Ao7Rtvxmu
z!3q6lN4s50(%^=Z^dL~j9ZoIG(vq*CKPu?Gv`empZ_9N}FW!hgDPHQ?hHYt>*V=JR
zU8&Q#xwqIktNtkc;+SZ`kE>cdOqy(e{M&Y6(_%Oe%)k~q>iUR0LIHWJM&n^#sTFVa
z^!Lf=S!YhFW>|Y0Q<>LjYb^C>u6Z#xA*dv}$TOi$Vo)pwAoYmE#{}gQCHLtkuQ(4<
znZ7f`IPruB&7(Sb7|sAm_toGd=B3(w7(4V4ET)a&Vn*?2%ok5)w=~fg#hTO+(yI!Q
zZ^K7)^1&q}Er-=i2OkJ?822dMsUp2(X6ky|CXz!7PpECvzvA*|S>J#3zW?#;u<+SY
z326zo5P1kRnMPc632OJ3L_9CfXb*K{WItLI{6={?
zSK6!>u5U&8c@0F~hq)8yiEMZ+ER7r@$(9vM%qtH3ByvaETA%o^@cE59Rifmb*Tr@#
zIg~(x(N>Ib`qP5~xM_Hu%?Nvv4U4na?2p@erR_@|VSzBI3xC0X*4oD*~@|!VbTJ
zmv;w-32NEpg!}UJj)|{*g10C=Oz^bN0CbxpWJXpG-5^S}YMaqq}WE>iKJkm0dv`Iw^+*_&R
zz~wV&!${B%Ztb*CH-jTbgC5Vd#4Z!ZXI%OO{G*vCy$5tta*1Gb+UMj-mFG%dUSr_t
zK`w5l!?ktC^ePKOJ8&*&W%CH*pp6G(MvO4A&nVj*xe=1RsO<`CDua86mx!8V(=Dvg
zHKikV8|$H$$#48kWcjz!?thL-yLl-K)d5di)m(_?h{xkrqjbcWvZ)tYY@5orpC`97
zGJs1s5auNA)&TE`?d#412;Rj63H*>xedb|c>NRZk*FM(8?_|PipFSU1h->xBF!+Sk
zQZiC2gT5g6PRqhr*3JM{B6VPH=ehvyMC^)ZnIsE6)`&}%zNBu`1G*Dp4%^hjDy*PO
zHkVKyR>a?iExPlHpBuis{8i-g#^piCvP14Z+4kt)wN3a!n26kg0qC};h-P_ovVw%k
zjb?!y8Ub1dV2`H{nUQoypmqQujz
z__c59syONX!0pLSx3J@s@Gz-Plc|MHF8bcBR)ycYs~$jtkWg>l{rs5kBmltmFJ=2b
zc>W)CN{Fkc?^;z|&wy%VXSqpF0PccY-orptRPPnahpzWO=U0MU)(tuqx%Q7sKdhNs
zwUkL1o#+LY;?iFJT!JMMA$ZHh-vV#0VP>mLKJ>m`R4q{Y
z-u|m?QGPA_JSj{^jf4c|ZG}OtAv}Bb|$U2U}+Vw}v?3
z!fB59uPpOnKfMVqwXULjmYt7_HzIU3Oii8;WMSk47(NGUO@gpviV3nqa77Q4hu|&8
zTwby~Yb^C((r>3G#^_**MOvBR$#B
zle5>n#*a-yq3>dYgz7?m$7DGv=vzomFUE3EC1X
zM6CCvZ64O}t)+p8N_qjvkH1?6%*RRM53p+%S8Pd;XNfEBn`8F+x=zaFQGanH04@hC
zu+9l(aBfu2?fav@b|1-|1T?%p*)laiHmqvY?q+QoEY9R5go$AFwrJ4qn%Qj$B`@fC
zZWvZT;<>((>p2O7$Vu1#rs9k<5NZ}t0LE!6b|-Zn#$kT14+q}5lddO
z1i%_oiqDadPn6&qgP-175HiOC9B=A>di7Tf}Hz16>#F5CPd!E^&&y2ZCAu1EMO%xta2@vcaq8
z9Irj)d&=|-zd@ww3zbZ1V{N9Fy+X<=!(sLu=8IyFjY0OW5|+kW`w%hV4`M^5a9yW`
zH8uVUyR#DCuiI(btL{aPXxQ&eM`*17$!4S&2vddg4r?e96iGto^*v#TW0nULR
z>$;0nDr8w2QRR3M+*MLh1ZK3vFX5ubOGYJ`zT$|S^=Z@sYcY?!-c{b+=r`E7uZQ$Z
z0d-h-!5Q%a?`ITKD(vpeI54su-x1+&+Ugl^SIcWkGf^US1No=MW=nNQaaiF6J59k!
zkQ(9oD=(JJN0-3vMNu?CZ@i&Oi%~Q2i*LQaQ_yOQ%^e2}MHl;YZo>zivcQSEq*l9$
zW-aOV=f`PflLDZ&+u!&3m=_mV31X)yLYdQ?CHQpg8<>;PO_!#M64$l_ki;+H%@@j-
zlIpcms-RH|B)yGmQ*9Hz{u6R>hg`pmy(3swX7
z#k7It2bdSG?rus?sS8B^BVi!_01Z~L253?wAhXZ&b-5>wpoETJldIl^g
z@7PZKD{(8mE30%v)bgU^f!sPOwp61O8@Zp4uN?NPZS+UZXS;G*?B0PMj{R$y^`&wUp>)~X}zVxgub4k(5B7H%y{ZAZ!~7($6IF`yjg21f<@V&F>q3PL)lZGXqt*%
zNRaqN&rpikmiDpaUhz%O4IgL7jg8Pi{fUbN5Tf|5((ukNWBXFPAMEWtFOSsvYZ|z_
zzB=b4n>zQ6wJMmLSi4^P6Mh2&4^zj0I(qh!O;qay)%1fYYBUJ3!k&ZEz2Gym%yfKXvtAyTu`s~_?a%lRV!`cL`x8~I|qzD
z+*-g|e{IZ*yfnJ4CQ&RXUl!nVbT!CQ{CQ+Vt1BgaN#4r~?%O+)sy$o>D_ugE+*-{1
zQv0nyUpAfBPjmwK`wwZu9;JU*i*Ab6gcx;S(7K?<^)wl{dzXX6q#XaHRpuW*|G#&z
zS6=j{^$hE8a9!?`-*8Hur!;
z6l{^5_|?X~6Nw6=n-ob=^Hz(Rdn|#>bAPlVSeW-VDMaccx|G0o8v9tyu-poNvecgs
zcU@wig5u!)Mzz(8yk%t5?M!L;=Q#L|fMwE7#irvn8w|t2i371PIg#$FB
z(&Au&Fs&rViID~JaHkz|IN4`!)Rwf<*l#mj&VwDWzx2*tIJMHM*qDchKviAJ{QRXT
z>QPZWu82<{yK_dG=iqZoo|E;)6U_!?>#cm)*xJPv?76<5<$Wb8Z{8F=6t_2c#g-yt
zpu|_ZpUgr=5+IP2$IhN{3zQy{fred~nd~nvTOP1!9^MY#PTT93jf~u~ef|gb5lJv(
zB3R*3C{N&ujH?Dj49)ALcSV5np1|Xf%2_Smb0J~%Wmc;s{uk^sg$hrknHm@4rZFG;bqofnbX+2&j8e3Rn%M)Q=bUxTVNc!O|+T!nbKqk(=`J
zUnnDz{1cQ&kPy6en%^qrrWzdtoth3#ZqJ~hlU}Nq47b8P!+1t8cPsWlQ@Ex0eruJ@
z>?Dk)iZFc!f}`uz!tM_=lhT}jOL3etvgjxNXg-mlkyOjtP1}Ds?FCh{_yNV3cs^|v
z!n=|P$ca|5;TgSgHLcQGLZ)ZhH*!%QRmUjz_3Zy6K}fMI6)$+_G^fpV1HrP!6|%4f
zWAr<$Hq?*5tJ3UtlRrJ8)p>3NK<4|X`|e)bu5AyEu&!GGn~{~UGw_rJJ8dvHi=)#V@Q
zDCfS>vUbIyCFJ3|5C2@$|NC=j^*VhAb>D!-a(xPv?U)Y1SEE@MrL`O$73-HfIti(v
zTLwU$FdJIn0Fu7SdK3q|CPf@wlpTnerL*Sb@w{F#Rx)k+cUZqkzm{A$QYZE6E4
zU)m{VybvrC8mjoxrG=CONHmOse6ZGhU&_92GRct;HK82sj?a5m=;_
z>RR&QrVS%%61+dA$1>k$8zt1mS6@H0bWdQ6%$c`(`fV+BU?Y5TY4JJ)2%~aQVeToh
zX-M$&caQEf@L9Y-vwrP7Kh6r+)?kKT84hC#baYj5oKSqBPxK!<*eo{Kc2d9piiGrz
z&5-(2SW$UojDhuNy{~5g&+D+Eb3GqVlO2SwQnJ0Xr_1d=}pAwhpUnA(D$U1hSuQ
z7B9m_8uM%Lw*!iivf4rAbJJ_)Gg{w+9-5^n^H2;tn)q}**GB)fL=9Q`d<5iAJSRxj
z9vzyiGzwSsI?CGo9_oR-_g!=e@sNUeT#?-@)vfZe_7nkUbzS+E=!=^9wHrmKxE|l0
zzR`j3%cniWxrRHbH^1}qDbVH1j}lwn!HaayO~gLZ5P{3LI4_`}Ono@J|G}K%h34!$XDeA_k#EPZi1>-cb0SJ$a-&hFZctY{x&7u5K=+Y
z-A1vV%Qmqn>v}l!QZnwswKJgWA_V26#6Mx71Hc_l#0gqhk#;Ee92|ttP<-v-DGj4@
zAC`UjrT)?@)I&jv$la*mUm@pDb8Ll9EW&=O1Z+VtCnrkAegn*FRdg-f=1FPed(m$d
zlrK+q@O}||@6%Rgf!wnAdJ9I7#HW=P6HGC~qQ+UG%qPaPURg}L+C2Wqi3tE=__yoD
z>>ud+_kP#;S021D=rK@U9*1p}JE67|Q8IxGHkH}O
zOP%b`-JURYr92d8r25uBtS{mUO8N3l928=+|-x7T4g|xRNH(=?x#f-W;hholfftvT
z>)uv4kG#iSRTc;<9r^r;1{;nJkJz@W!IpF-Ci_f4G#XICC)Y!Rn?WnK>M|r}(=5g6*DNs3=mk#w
zOkPu%pw*+~SO8Tbpm@urp{0dXtGdzxm+9`Ln-0eavx=>W5xcoL9YHZAnG1j@$G$A*q
zZg}5~t*%$)b@3g@M+6tNGPUb^fP0|)o2%_npWPMt?Q-=b!}5}iK%1_Z=MGo=Q4RiA
zVLyb=fLje&5;Mx_pfydtwj%fJur^m*xeSi}+DwQ@jtBp9MT#gHrEl^+29##i^|bjf
zLQA#~r4DqG*_J3;*~{!cGlynVpw8QR&}^G`SvnC-Mw
z(yj?3{2cYs$z|qi_Rr#1z%|s%)dc{OU8Bc4!x}Gz!t~T(
zl3Lp#ywm7JmmrH-yCPKg#v`a=fLoWFp~)epD%i&nIh=*7!H$Mqz)(FePIN_V!?nw&
zTwR@kz8|rsyNJ)yJtS2Ic|Q0iJ}98vjgNL%O6__jvTSyy?2zgD8*>HESHdzuKtyP_
z-<^a*-C%TevkuxwtkX(j9#Ou|Hf^5e5^J!%z08E5|`rcDKz8AEGth`=TJxiZpgbuO6$}e`?2)eEfOS
z=g3O_^ymob?5#FuDH;MN-Xv6`sT6)=7;b?j->kmx9^C(->3OPTB@Op4zVgvS61xyE
zsOF1sA<_@-H2d0nl(=V?qpic8Cma0hWfZ?9A$P9w>MyB_4U&3DS>Oi4tzyK{d1_2<
zbosQKm0s+lUlRw)Wfjq1g`rK6oG&xfq?HD+VB=5FyO
z*ke*|LI&~(^X}vSV(!hOq5k{7@v)P=?Ar)MA(4F>k}U}dWgAin*~4JW*!LxbNTx`X
zWwP%x_DRZ~HOrvv%vdMJbiY5>b$zevd*A1}f9HGd^E>zV{==Db9Oum9J@41^`B)+0
zeWDyJ-qX0l2xyceOWLjB`&Y=Ji{}nynawP7S;_-{7wbs6XK48OXnE)zT9QQPX)xy|
z*YcudaS1zp*l{$&MUN6SC$q4onF5kzTk!nm6oD5?1Seg=$ip4()q(k;YGEQ@Zk&IX
zvaC94MRvAZ8d
zK+hvSLpk0PCwfr{%_*~qeW4#)gSsr(t~2Y<1`@>z-bh)dCohYTPecDFxQFf^@HbeA
zGOi4Dda1HW(KikW2Jej+sacy1CIPqi}qYe;S`6S1Q
zZY-1L<(v$~z>P)52mS)vaH9;l{*GlIHF2NZ@{QdR
zZhMnVIUccAHJuz7KhGC7T7ZR;EPK^eQWS^73fGm7V=zA7m?JG(%Gf>{`UJmWzjm69
zbYqo1Od$oaRh(f4AY0FdamDu#t-5w0;^}y=VTs;n^kZ~90D-c{U@n}#!d!shP%*Y{
z%~eijiEu6NbfML+Ex$O?Ldn@)9}Tu&m93jaA^G4c#IE*w$4i8s_1UT0%KbdZP*16o
zVWT#y*w6z$@kl9}#==Csg0X*d@%_uQ{r{C*cYgKBn|(!_gw#QW^7_J})E>*?iF0b1P*Y>F9T0JOCvqU$t2f-S0#=V?K%Iv(O
zjPm6o8xiQW@4oVXe)UY4qh)K%~e4nVfn8|^w+i0uB&mcj{Otu
z{A)^!e)s@$@f2%k4
zRt2~LB6#zspSA{K8hQ>PQSnthCyuO{^v+G8^hc@A)C;ftt*l1CJB|WU`ZRQhJ>eRK
zlWx`)@#ULuOrE%aJLBKpMd(=l1H)rKOL#g@c#in)sNLFM{UlE|H7`2tIY0l;r=iQ>
zfkyA7BQW6;(vv;Lk@fe*D#FivibQL|E5U~+pX%iY#rclDg|N5ZN7lkUiJ_M35=4$V
z8*$olb5R52X5UNnIkk8v;g5fdvjAv9_AFm=gD4(&HKXu^?vQE2<5_B|fy*cpZ5w
z&8|z)Y$8+VYjYw2RO*z=0hUNC8G8)fHx>EJbYBA(l)<=3pSP~!91D?@ypv;mo
zm2i;=?s7`tD}20e>iYK8?hh}84g~#hTDQ4;4AEr3?^ugw_F!IO7lAgUVMLAKxbJnG
z&8mW+2NIerE>Wo(4=|BuxPYwEKrX5%fFoGdrXlPLh(r|6xaHZ4|7?g9eN(VGxo+m?
zkx3d2T}X?=ve%)wU}-ud=!#9z$3`W!33;xfF)2Z8<*;MdPiPZ#F2pTuYP1(yn-i~S
zs%`2w-$`K~mAG~_A-UmP|B$LZ9Uni9TCXVxo`lDebTru)`eWJ%5(ov-=#q3!Tf@*(
z`q6i~;^(N1t8&0|liEPS$m6!qg&^{|2YQ~5mLTy6nenYw#6A0IR_#^Q5#2t{59#;h
zK}&CF8Z;+HQ8a*r9{>&+VuNdCwa`laMa?_*V|D{A`F}td#Ofh9)z`5>M}$_J4+tO2
zK=we0Hl{=wlOxYot9Q<+^5KtTY6jIbLia!X1OnjfIt(Br69o4kko(l-$DSsgA&~QJ(f}53>FMp;t
zlzblQPdoFV^>4z
zDt^_o55C#0M$6(oISIar+;Uj7OJhO|V6?|P`A3%BGV}xuw4EMy2MdY#^tRgdV9vD;
z{Q)^+sn_#@VFzYxh9$6+hYRYj=N`7MoMg0t!lLwDVAs0G3ls?NGb<-QS5L3=4FBl;
z;&-*b09*X@UqB&L12LI5@OF`vw+qYsj3Hng{s@tM#11BZ`eZ;Kq?zm*15OEJ2o7S6
zd3U6uP_9fUGM1?HDSdO+SSy?cn87xuoF#j~iH?$_S;VbzUe4FzT{r#fWWGnZUC-is
ze3@$b=ncCziPO!K(0@oK0_UdZmo8_OxN`mI^Vccc(jLAWo1uY+=!Oma;B>VwA?|RP
zq(PJ_)Gcwpsc3qA`7;OImCsasD+v*YQ!^D|xPO1*%aY}c>gH!&E!G!``CiN{o_3=+
z_cA^-+#kFX3YgTgcD*-)SrDS6rIYKhdW8##OWn{Z;uk?RP98oo2s(hNenX8>1YTPb
z>|IG$@6bhE)@7SRAl~Te$~UX5Y~8e-^Pp29o2dW-l+1lrFjo%2gY&qEYiY|K!<#@9
zuKN|ehwV(}o71DV@F&Xd6j{Pf9OfDv!p`0;$JoU>Bf-4qp!4$Sf}ex-1JH7$&L-PV
z6yl;AvY}zK=i}PYO+~cGT4{!<2UlvEM;PZUS0c=*+|Z@hXpsA0{I$re_AH+(r+Kfpwh0#2--)FTC3Wr;L;jj6bHj0R+C?&4?(y_({w
z`4yfAk&i-P
zJ)=ab?(bkI=@P>x>-!C+AfjbQh_12A>2}k5w{_0f{(JTY*k_!5UK-m6=s4tKR5UQH
zCBeh!Q42c>H82MI*RK^f`ie8S^2}jLUy}(q?LVM_a7c)@F#P(kwiL|Jj3^nm!?S1F
znymf_q9Ej#3Wp#9w#~(KF
zODwEt{=#gM49noxzdtjUAjP~{Q|Ean$Rv^(GQqeJ(f1|UzX0HR|7|qG|7&7^x2-i%
z=CuXm>wMd?C3-@izPwEzOXe0mpbh*``i6ZFnGCMRrePnm^Zx*58&-d@?-*7KG79
zow&ZsylUCLjC;mQKn%~qXY%HZWjy?WF=^$z*MU=aBxikr-$uqEj7CPNlA7U7~*%2-p
z$u63@U-x^@c>xTpPcFXTIPVII80X5zPAm6Rk`nepJP~`FZO{F%m(8a
z+MzYE2?74>SsylbjmzvG`Ai0C-_tCJ=b|X#A@xUF{CMw_tp&iZ3ubFI_N;WbegCPf
zRI5-!LQ31}_vbTy4ykuhO;Gp~^;@=V2_N96TK|A3OsK-Erzo%1`w9;>dYXSy(BkQl`1g-!
zPXqJ3x2&E~s83TdN~SAHwK@Kd5?2`e6uxk=wf*J>0t2fVqDu&n%`QThesj|2<|v(7
zw<5}B8ebs8yo~>jj!2unGi(JX0PadVqx?Vxg=)o6K~qp?8A0H
z$%fZymh-T!L=?csM~=gGr|(k62v-t<_{D>s?{@lbWxIn;k%d@QvI=l3Dj)>RTyMrk
z)|r^!4X*IQcdGbk>4!oikqafjzsNy|9?5C;)CJCysW?3Mnqup-08NVlrf-jrPf!%s?I9e{L?t+x(+kT{Br+wp9}oE?N?mAJQs
z5oP3}+BqN&c5gI6Tv0QIT?NW7WU&8D2U>%ONxnV_rR_xqu>W*D6poWH(?6mz0Px<0Zm
z+ml^_ZwU>Kdu$yp9zgc^hM%O&A<%cC4E
z;&=bK2FQcgAq4o9MuKCFkTOM-5Vo0T_2cS_7Y&brLw7!hnf4EJ2Dy`S?hSxkPY&)v
zFoQPjp<87`_SWvGT`2yPm0*$2vZVkuZ&d6M+fc=&~>qOW=0@tzC(
zC;FLHpe>w)mw1}hKKFrFROKY>m44Ot{g#(IiW{qU7Uvelvoe%HW$dM5;c|aRp8X+b
zwa-8KD?y683|8dOXcwko7X5iSpVuofY5l7Y2$)G&Pd}0UKWrGkB?}YsQ-Fv9mE?z<
z_WkF!l(R)qN;5*??N>kVEA41g6C861dmYDyv9++)3Jn13eWKfYPpbD9&M#PUEblh}
z2^$dTn-)!>-4H=de;WNoD0ls=@`$1?pA2!gjTX3#3J41jjh&vpn0H^WurTr->J5F#A>5>Qtr2A?o{uVXSLf{DjX{V@the^X>)K;p*l5WVVn)
zbT(umAImYL)7(I~aL^U3PDugBD|SC82nlBvp7^3jOmEoR{xk0=eXV=C6Nc={tNd`D
zN~YpbquSc;CGR!cfJJaUI(aYxNFPNBLM19#Ms1T+U+YRJmY;jWpm}aOT#cP1nG#(=
z5L@czLgwzd+VdM5{2Qd8H|!PZ*OS?zB5r>9N3zz?P}|xQnkC5eISSKjJbPl^
zyXlFjg9I{NnQ7bm$Z0u+FOQuN-ZGq@wmUw5K2+L(R97@~&oh%A9CaYii6!ptbbZZk
zMzndoyIAa5`_#WrFz&J;L#lr0Yc9}~_
z2tV`+nR>pT#(a3Xwo7f6DR(Rsl7_*qgW0cSR(D!-!U$fYk`A<1p
zZtOTS21EQH$8B+9ZyKX0OnT4bu=q3`?mz%9GLt<>Dw>Pq
zBi{9MZWfP?WmUUsotA6${rDMlLY#aL!JF63CH@y?8|r`aH~;swIe?C+3CN3rLkJUTGPZ-=Av~xGy9R>eO!>>>vkH(i
zt5SfVu+{j48qPt~tnTw&IH(V%XP?|Ij%RLf=L$vTwP4Gk8Kc{g$G>b=72o5ta-jzck%#XllD^mq*km{)G^pCCY^@bTIN-TAAgAAhgk(~OfiX75_}j)$BL@bp;R
zP9xwtZDKo5og6G}$D+l|YHYpQQ~Z1+{M$7bI6PK>G(I-I0J(E;Pf}BvNe`vabNrVh
zyir{4+QQO%VFuq_-@KhaAQw4)Q=+&%aWL^fHQUtAs_EqVEnaGAIr;JF^FLBP-a9v0
z0SdcS1fuo}b9?f4H-|QmDj50Vhp|5)s1w5(IWY+3fmL>yBz;Vq-n+L@k6Bw3}D*;S9
zSM*GL8S;(pE2HNJk8~^TD9nN`npl8xYOYP(Wg5%P>Z=QnD16UC#m!>oS$r`m;DltF>0#b?NT9
zG?Mu3vu_a}5_GZ2k80WyftG74vcbUjffN)f9WQRCL
zo}@Ou*HIubqLi@Iaf6rDTP%IlX7saaXlFM-I4o>VcK>ti=LmY5arYH9Z=y!x$mV-+ANmDV-zU^
z(~u;%mERR0eWR>mfD6QR^LiKp^?b(e7Z+D4
zp;#9|`a8M|$!H5`F#Iwp^DX%fVR|FvLfyAKlPIOOpJlq2<@tX{2%Q>u{KQUew9he<
zs)*pF!D!9<_zf_p?HWbwvNZK?v2-~wF)oYKhNQA9Z&JBIF>uJy5L+jdAh*H<}m^4q0<$8t{_Dt-BZ9j
z3U|Y|nOy(Am9b|!Ch8H4_wACVW?%Xt(u;X~`a;tLNr=M<47SU0IegO^c=gMX6nOkg
z3+q`ec|q^lWBU~v26S;4)X|w(w(uR>4SUw+YUW=8>(YJcux6Wo%j$bXUrhXZS~BO(
zwjb5n!61pfw;s#^zvfxFmaDA7F4p^vtSg)eN&PYAC^mjYaXt<>zn`I8o9jn@HfCKr
z5`G|KxS;O?YyzewlK_5dM*V?Bz=TSE?rL{S%|psFs*O70L{h-HlgDshD81Vz6dx41
zeP_o8b6r5z;DEiK`ZxmRn}YkmJ&PE@a@U!f*;bJt3kpZt@)J3{dtI-a9g-euRxWMJ
z+b{hARZ4$Hu#+W*w3%T7xGrsxIwRQy^U(nghNf3E@Jd7L2U0ZE^$m#wb5iH^Q~6$q
zi6zsSUj==TUP+r4Ukn(GMT^_9Cu0CfyT>w_=q@dEbZg;t_a#Yjz9r9KR29dtB4CrCA%Q=_cG^R{)8G<=yeV+*OPKmy
z%t1Fly%DA82@uuUMngH{O%QERKi%zVEy;hHa2pcO5zH{7G_;B$m_s5&L?6Y#pJZ=|
z#WDvyQux3y?+iwVfCkMHhbSZSRdUHnNk)0Bwhq*X)5kc5XEqkyGdT?WtPSF0SJC-T
z3p0U`sv2mAT#BuPNZ21x)D>HtMG8G_cJ-|`^@?x*2=0|@`Z6$(^*jZq){XH0w(SQo&M+Kdf*(*-zxAnwX(pGE(hz;a(
zHobYBFy`)g*4CT)^<@iNx9v$v$u~Z9wnae4b~GLHEwj1mQ$v9`V|Ghs&NHetKc{#r@^8
z!7qicy-Q6T{44lu#Iru=3`KOH7Y;eK}SHIDd
zs7tBAX^S@#V)L6v?6kIDJw=YbjemS2{veB9`$3!0_O&&;5alXY^jh|DEXrSaHh&!`
zf&zfg4eKcyYDQWSMy-vNd0x8>aE)VKDGaSs)i{n2p1b)a2f{KV{1gA~CP>p@7O^(nB_6x9^D*-9%e+NR
z!3cozc>WXR;m}=Ye8YBaVWJnpM)o4;qY~g6{=amkW|Ufpvb9A!21`|7w1vZW1Ppf6
zi2<_QBE#S|alMqYwb<`K0J;r5U~!CFvD4|!vJbe^q#@Y&+e~$EcAlLaS_r>INPWEs
zWp~6Oi=o{TiFZYpzTXRUf{9mgQ4Rf{D2M(ps)hb1zU(i~%qEvIYP;#ke_B8hch89S
zu3lA1x2*i6rSj+!O7~!n3eFtsNSVlLS&b+(}*;a4q1o
z^Wb|MV5-;CAnV55d6P%w9OFbm6C>p2k=bO9LH*DUMar$dRI@_dx?vcID*W2-`83_{
zsxs;Y5cu~qk2{>mlXW6R)?_}Z1NQ9b)MG`6^5p2zL-dic>+%t-d=>|h8MQbYC
zHA~Y)vp8Xi#V%qkk`Zu#ul_>|NphFt?O#HZE=#XMA0w+_E;nRGu5LTix{R_)u6u0T
zlJ^L>B-v+BlJrsHGlJ@OP3`QKRMqEpI7#qP4dXI
ze6+@;yxBCS_p6Go}KSw+ApI;RD@O{A-!h;mXo`h(6XqGB-+E!vE#_te1S0
zCpjmVdarF8K@t|RM$&uG!&k#f*s1x$#bJcP#T4KJj;p&Q9(={gqQNJ;L*^E4`!z~o
zUvfTO2aJT96KY8Fcr3s~3;y<7&wKPO4{~&W>5=8{bf^?CUMXl_nCc@e-6tVG+>{~f
z7rFbZ)is_M9@op@3Yz}X9x;Nc0rDix*h%ZXBTGhOF1eKzl<&f=;XRP7#>zh+*>=yC
z2*Il4)Vc)}_z%d*n*r`lyj5_iX3UE$y?BWu;fl-)zMozL{T&QHIQPGSg6GNH;!AZU
zu^6^>6x)`}j+fH6Cw3VwX-AH_nKCM_Y8t-nN}GQ`pODcx9oTAg_@*^e#jSeuGpEL%
zOA1EcY70n%j@KN`2wfDWq9T$vZXBp02ucoM=XP{E;`}jVr+{Y`f22~A{eX%_ye&+F
zh{R<>7!hYlncdYNb-mwB8cWC*ni_TEKq5FLUN6*wVhg
zEf;$B3;nhBg@`ookdtf-@Hs_7N$$UP+A+$ch*Cqz8MsL~*Zp_!-&NgePFJsL_DKlu
zC7w9`Gi>CVrs>;~1WlEXYwXfGIfvELwNA}LT6>E&f;;fE(1i@-<2Eg}`F)sn;|Wx`_Ct?awtGe4tAWF*0xlnjna%^Gj@%5L
zDcVOCV>@hZOAgNDQ>a}QA~=7qN{cguYMzF$H#iLp|9l9Ub#s_X`!t?_h)^msQAUew
z4oZq%-~4DRKU!juf5@=*QHo|yP6v2%x*iQjd$T0eqMzKf;m%TBZD_22e$^FQQNQ4m
z*3c!%7!p8E(oPf5Jv$<>KOnPKBK=3r3;3D0&W}o`dxX40RZmuau_#vxHbJhgR3fd&
z&_zbgOL$XP+ooTv+5>haH2mi|s)SCZR{&F?>5pj~@E^WX=ac@f`F&Mz;0-eGnlZkV{rIH#F6xM;i&!P|Vop%Dfi;Y3L?CwfUFGgq;~qsz7r
z$Ltv(A&a}UqYthK^Cp<}He+$cRsOPgg$e
zqie`Pizjj*(ltAx$eRt}f*iLCKVq7lx$+UJxFsEM3O962KKm2fqGtybUc
z;HQ;zom;{Nyb=eiS{wa}gWT{3B#knS$#s$QLzQBe`AV}z86|&;)1+s19H4&;P8Kb_
zuQ|B^XR0Drws_K2Y<)WzyPUmlb<6D%Q^(;Fq5Kah*2Km8*h;WPfsBt6dGCZj6v5P+qBgInIi~H%-nCX1m-m2&I<@otU6NP7^5%WD{d#
z14i(PpXo*8#_FiejBQ9y^VxR=43b*XfPE2rns@HdhlEDrErS)a#muhVRv>SVZ<|O9
zh5CebykVdBj)RqV>9x86PZz
zoul6qPYSfPf6{w(&1aKYA_5zoEK<-qvHl&XUNm(*(;lQHMUz09C7zLsL7vgHSj-K~
z9xZ>3H&A7l7j&^dX|f)EOD_S(Q$q-0jw8*{ULGz#MM
z2c%&^k`G#x&exnitP2fWaoSGT94Wn5fV~{C7NG++u)I%miI$qNgEpW9$g@->Md9ZF
zMI`LiT_UcFB=H@7At<*FO@Dr-4!hD;!Utf*8h=1?E+h{kqzlWVd5xz9-*V7a^H~0K
zWB2pVst*G+W9gd<-$XKggPC=M!<)y`7N=T+2xWKEx+oW3us5vP&;rw=yAv{u;mlt)
zs-2ELQIQ>eIB?0xO(%P#An}m1Yo3IY#Tyn1wrj`p{hq;B9a+w1&?cI_U8#4^i2?ld
z-hyig!6LxF_$_n
z_P{R8y3>ME&GVBzAV9N>ZCyvK`3$I!2#u-53XuFQh62iNU`7rYZJCpT8Q7xzAjb!U
zi?0F}y-AXgp0v%AH7~d>0oonJAL*Z+@x@w|20U+0kHoy;{49pO;ejH~hwnoYw0SXv
zqxf?1v1cvamn816>27`rDWmx;{`?7z$&K-HxZ%6|z`^!4D4W+;X2|T^BL~LMqTkm^
zF8Io#Mf9ZWqA5C@Y@yU!LkPW}6`PJ$UtSvl<8wfC;5k685eJGml2CX@l&Dhg6Ab^d
z?#Cw*EdUDpFIdh0Lyz~@<9};s2rO=^3cm+5!vrm|R^wKJm8l6OAG^f9%#+py4v*iW
zpN!9m4IKzyJXFdXpv;-vFLHkVdEF{PWFZd(v?E<=M&VHWinGSEUQgkY3mU@8DpUyL
z;-~32QT!utRuE9;;PneAKX^@?@4A)5T8yvz{uX-m^)|6g!&6HQ?u!$qvp6qG59mvP
zw3U0OWk9Fj4iEV@@nY`MPRyK2PKSubh>YnARa!lNp2aUQ5x>4HyVB5S|u*a&(iYiAj1?474@TSe&O$A~dPFXrQEH3kH$IgBL*7s6b0$311v+mRlR
zVaC2DNx19IwdnR=5j^9g{q%qu6PY?NC;lQ*QOmCsuqz5-yX4}gT|4KgSLe8g$0}lT
zy*?RyJ3zEHwlEs+(g(^pL4i>gXSj@DYB=>=C}
zJ6S(uzP-^C9(p28x--Bp?_CP>)3|m~KJvltBn6OK=3*BzL%a`Dqlnjf{4z$@ZEJnc
zl4f^fAMdQs4>Ti5mnVAWNjBJ;FZRv1^SG4wEwPN`cQi4Qsf!J*h9@eN5ooOn5H&{)3th~r
zTU?v=$7vpAsus)%`rXQq_c0qVnyO9h?`v12I;J`~1G~^lb0P&-8(9vwv&Wzgr-rV~
z7X*vAq;Kq{eQENJKJJ2L0;1d|->23l!g6}mqd(K{66Jkn{5QF=@@tNJK$EQ-_zXhC
zJ!E!2$?e^0w(KSFW+A+k!Ow?_YAhY9itJEt)%xH_0$c%{b?>A!!zm(=yB(nmtdjI|bi}GbKSkMq3euM(6Be9nnt{ll}LD`2K)k6z~Tn(p%s=bDtCk
z++nBR_jgQtD18+XtTnlis5ElK%#HJOlopKog%qeGUn-mP3)(42Sqx6Fl)(t+{(dmIJX7^MmEnt@nPV7a0RiYF0YLYOS%RZls-7?Y
zluZ>bS9Fu_`iQ4R@s0`uWJDk=ri9VODH24<8WT5&$~{
z&6-T{!!*-G!yljv`Q8!>YzXh0dytvRLsRcgZ8lQa<1gN=4vWloI>~GT5MU6+
zX~bjeFC)^-sZ_o2+dtZ&pv3SE-m
zQw~Y*sT;V;+#zRRpFSb%F(;i%-06a{z)goyH;D#G9_9BNL~o-sU2DYCa@m`KleyS{
z?e7g=t8N}E?4*aYK^>8XB!9ptPZ5D${hY0y1rQxHCoK;L!RWTooX9Pu4R+Qk-9?ib
zPs95+dV0RSGf-VtTZoZZN(99%f+K-MocMw>-5-APfp6tI?YCrHAJ3&=dlr+B7mT}+@PnqtRIWeI>
zNNaT4a+f6Rvzi!Uo&tekH*hS?v@i
z`8dmcC;dPrKqalQB#4g2o^ACeFmz{r%@OdNnO*gB@cPzm_$$CNL+wr~sMn9oVl(sN
zFN}$54+Y@5EOs6g(bG3@jWUDBHCQ6^yH5>zT=idC{leHapBXoIa%9kVft
za`>_Pnm}^^h(CwSWoA!uuIwQ6FTJyzR?0^`G<9Dy%9u>0{>m;9ddPk3|0vY^!~KpR
zH_agBnl~Aj3Y|W$8|YE1M@mtL{HH76UwLmIN7k)<2F_=!Gyc+ypVkrF{RX;U_}t&*
zZvdwd0}dVYiTsFUxCmvuxxtkeQ^m8}_&N2FMv==_Bg@G-C=aCrT3eUXaiXyUsJDUJ
zP}gy4x}WvH?rGq4m88+r#~I-E()g9p7j;-_}YAiK*MF`}wJ<
zp)t`TP%yfx@uB#Hc;atd4}@@gC(==3c>IeTt8$upcNOyqQ{sRROvX+l%_2ih*uPf+
zK+`10Bk&avi-r-+(8W{5*$xQ}nL3`x^NmhkHx`x(UY_8_!!=(mt@H?{&8RGCVS!76
zYG9XSdArfLZq}W}*lYdW@7L;Et3|4h2l@EXt(wyK*N_g3;OH~ii?g23X4Pk9EAWm$
z<~YNetJ%
zA-1ku&-Kb%t*m!7I5*IRUjcWlc!m4g$sWY{!)qj}B}fXk_A5n=cpIfrw(d9Ye&;4K
zh>ta&r-Q-YS!dr8TGJ5N7wnV{Fv3b4S|bJHD-?f=)mOh@&AFzKl_;jg?4muTRW^WqjHc;1
z0eZ>2KN<@a5+?h~=cmhyi%v`~WthG>o%3Pbij;2GR;7#~=tyhCW?YxN)&$VEu+Wra
znf}bg?MiQ5so@u$w^Y_CX0(}6doH5OdyaBMhAqLx!3b?B_3cc_P9(K1ch9f4N?)?a
zc3P+SesiMlP5c7o(i!#VQUuA%Ck7y;o(g;kyQ6gFe{4Ab;l~2cgOJQ`A$7pw|Cfv9?m>#}%=Hnpl5`)fa`yAq-6qY;QWB`o<_w*v
zI!G)~!lQ>6WybHs&WQ`qV+ni4l+n};28eskc^8Jl8tDMYsc#tpV4oN+W-ZbIfrz_3
zWZor{r#&JS{!Ee6M+~4wiB10r@yoLY>NamFT?HZn?JJnjR(1bHIznNUnnyLm2LYajh>^4He
zCbE7yw*RtX3pY~eU#GJd8lwIihhP=JYTa)fQG#iS9N^}6lYA}~aiE09oTz>e%_g2e_snv0d4^BsSVqDw%
z7~AfhuuSw0o;bmuoW5Od9o1$gYc!6?__>IPea~5j`V4S$?W_JytEzm3Z`h<$t}5?Z
zM5T(zwg9!HVy|pRj9-_J1x)J&2*aohwbXh*etDgkl92b=S
zt4G?dmy^>_=c6ih`M>1-|Hjq+?=nKDfwEnokP7y4bMJD$`(@QlEA>0>Yqx!q>OUTO
zYB(q%FFvxaQ0K6cHa3tgoo%_|2F+Fpd*u44rRLML)sFGawv8Dj;s4@u{k!M?>vs&F
zQ4K*Io`Jt+j7V*SV`_{Np^V9uXD!#O;_i0)!DavSS!3atQ1;Ey6TMYrt!q~cl*28z
zcC3(@r=jC|
z(hF2D`S?MU`)OvaAGy+z9t9`3l~bhE~=2vE%gk;)k#f7>mlfH!Jjo_oSI!v
zuB8lV`$|AJ&+-NCqsuUilqr9*6{$x##rM75dzgy#-V)(E`R%yJl&Gk2KKUJoA4pA@
zHH?mU4xbZ)ktVsX=n%aU+$!DtlND7gVy8nDRIHpQlLbIUgrX(GNQyGdR|!i;iX61B
zQk_2xrph>rjJN&r1OBe24Y)x
z^yhPzxn}Pe6v2`ei7&uvW($D@85I>t_=w1j9Bu85zO_Ok){?V)37_705N
zUm+K=^z(LvfhyNZHx(LrhpC$qDOgT&ncqJN2>#30y4RDrKa0xqcO2kC?$S!pE-z(n
z+%61P1CR!e3Iq>aeWKMesH$kRA~`;F=0*lMi&rc70Rz9%F7T?;CXa5Dsus$ifP$)y
z)g37aOQ$1ebip2+Tvm}Ul;Z2XYG5ejBk$l!=5iEF+L_v_U46G(u@hit`mvyETVZy#B~oQL$)LsJ~iTbcSJp4g~jKO29}UdeTZr*+>CL*bd(P$Az|o
z*kVu2_BXyI*bAY2*VHM#_5qL0Q07%74Z+a<`_`cz@#FX2>172tKiAzCO(dAjOdk*Q
zA36|2=1%(~Xc=iZg7|ITvxU%%)_aUmSZDe?2$>x|UyU=g1isx4A)X0kUzVcz^cTqU
zpMO4vU^3k**KG0v_N9GYgU;zHnhp;~u&|E_H0gu{+sF#C;
zvaNy~-_Cnzvx|zk0-OJQ@zj;qB{Q}qKO!X(fHogWmiu$xzfX;H@NXx+q~2pQhE*iX
zJ`*4q(N=GBP`iHVapKt3y?0AP9BLbbZ_)3RWDRDFMk~=ft|lHPPx9Q}KgU~eo&9PC
zhzBfAUpB@;sl?!Vsg#o>a2$Otp@BF;)NwfrDqAT_!o&?Q
z&%P(y>~WHESl6m}g5fWZ?PkD2_Y$&@Ya=3U;zDFz9B)C>+8cX5;!nAUYTLWH8vt~*
zaXL;Bb`JSSa)hZjd0id#(=nT>VmnqY0q4D;uYL{=*|b^caCXsJ56f5>q#Hz?iGKsq
ziZT8_2>!4tyx4uy!U6hy+YC!H-UKE|A=|+`a9@(=TXf`8&z*o-^%I%u
z`U#H4>928P!mS}Z=rW>-4pnpQp`(ZB8h;>IwgGHf
zP_vKkY$TJOyo@m#a*gR+X_!MB0w-=1uom-^<^1tG{HxZ@zj9SOOrn1S#>HSEKLDCp
zt4+B)p*OOnxFHp-99ShIZ@?D#F_|iJ^YSbR;kJSLgzMdzQt7cIp$%u;D2aWX;O`mc&>6*nt?`tm0+f!(sdB
zZ^AvTA4Sw2IDJ_U0lG);e*c}|e{zX3hN`ODm?fVhCSxFkJ1Q?JBG@eMCC3GQX-_Tn
z?v7N%8d{HJ3XF9&!(LYmq5yZ=bV(#x-z(5wQ9f5}smxQ=iC*1*D-U$Cn7D_EL)AjN
zpoxKzOP!Q!BLO-_^ygJm?@HDfkem>OZJfdn%|Ca%*kx3s5C3btVt9!d0n;a30_vAA^T>pyXk^{+cfzi`61&w@tVQla9OyLXW7)e;}3TQ*;i)oZCi(NQnxZl
zKkq*GF%7(H&hP>;G*2Z9c>-Z~6W}Q>Gb?ekZdeNrqGieqO!@)(ML`#;rOXzT=56&}zBOIJ~F#($&UMSzva83oism+X(dh
z0QgEzXeIlZAMr!G47WhW{hZJe(Ffr?f0Ls>_$Jt-AMn`mjfg({=;StHXkzZ1$6wwl
zw&;+F9$B|&<#CHxXBYkr&3DuvL2HevO>fww4_^=|O~jT(tDJTy)ve(UX&H*l_}iaP
zxAfpcsXb}F8!<~6+Jyw~=x2k}B-h(RiXH)VqfgJ@JD(iFwlY@m?sx)4`34Y6o4^_?
zVXIkUNmPD=7%gAl&DeFY7qR=LxyA6_So>h(?tk2<{(pJ=f8?h7&-ZfI?To>$dRi{A
zai#L|&A$&OXcY1(a@yO3L`Oh5Hhr3SHVJ<+j1xgQOC;_^?uykGSo;N*@vcPG1A>*O
zgNZ>{;7D`@>aIOP9Qvo%M*s@OuZ!}@L^?)L6{37C@s~C=<+BLYD47mF>)KPmZ^lV#
zJ|38(P*%DjYkoa`tHNXSpXu(U7nBI)fqn}b47UcH7*0RAm4{1%J8LZjw6xY`o?J7%
z@pwr=n_}+8$EZu|JB0EC?r;k^2-?PGT!QVMdBd)SUgi7Eb^i>8&s1B3Yf!HlJAJn^
zJuGf4M${h--dgEwf+|&(?tSOQOD5atx4ydl-O{8n)T7KLLEI-<*K!ibe$T{mARh~A
zc^~FxE*y2D=sX0hQAG|tSIQOz%epjV{4Rs9z3(<}YFCp)`$GZE#Az-Q}fySm5V^O55I5tqGW7_Pm{$0*xlB-)sPs{%a&F
zcA>m^BK%5DCsqK?9YkOEWKr#a*<9=en#RAgon^)Mv*@#SfM}$KZRnBG@unKx2;ni&
zF`&;h`qK{`e8ix+?@(%VpwjMH#PA%`X_>aj%nVCk
zQ_x4WMaH|OaOs@W2*nM+v{#Gj0ZdPxr_09@g6Im-w#NIK)fv&hb(YVjUVE;+3J_FG
ze_Ku;!(MgsV7Lg|A1dbM?PS%*?V7$EOeWs1+6njA8*GOV?jm{I|A1yxYRK|qb^XR(
zH{I5plG8h*)L8KMnmYJ35w%pu5Pdik^2CcWBqH^)3ojROBrW(RLpnCrE^=cipBe;O
z+1<+PoIqSe{K$)a?eC|_eVWBKQdN;sSDq<-_!~ab?F4=)XYZ>6F5-#~SGY}wxcge}
zu#l9(Sxt0J0~*M;jh6QiQ@$m$I?+xnMY%mKb8#9LEx&9gf@vo~}L9BH6s?=>xtZae}Pl
z{YB^uZowvo+yUtpR^Gnh8LM&jvBU}=A0Om3{5Rm4x=_+#7GEGVS~r8_-@rwM_Zs
zJa;SsLf#rHxj3%n3;9V;?L$ZxcQy|S+r^j!j$u^lW?{k07b-usrd#>yJu5*6poa^`f{xjyf&1LSH`}TT0
zpU=nUN`S@Uc-lm-33M{JMjqcf$>}uRQ@i++@};D*cKw9uiT8HQA4#zW+#|sMTCtlN
zb!fSRgqc<8-G$dK&YJjHgV+6k&fN>e4vr+QzKyjGoo#I<_0;$i=loR&Q+RI8!7
z7fxiX)IWUk-e!1(QUE$cB`$H-qn|L?$MjU+cJYt)U8<43B^e1`$|aw4M9WUy+5S|j
ze#y=zyNkfI36nIYcpOww6R*tFqmpzY68s&*i;VBQIFY&e^IL>?H1n}SHMs(?_*N0~
zJ!nLU=|xmg8}@LgEOaU
zHYH)h)HmC%OgaxvVCUYc$Y$NVZdTG1yT>eou^{8(_f%MCErIxh@YzSU-@Nj_-ME|;
zRd?ks&^!H$3iUszJ^l}lNw59?=l}En<=hdaK(kupPrpr8!1EcZZ0`q(IRBWG_0;3L
zu-H3eDNR?{7(S7~!-_gaQ#lCNW1o$CA~lX61dKQ}
zn&h2)+jb<-e_vN5-|w|_+Db@mD_Wl$+PehagQOOpX$5#qd(jxVw<=(8(a#q@hwkrLgY7;XycKyU<`
z`CzvP#ufGopbH|kWqw#({!Xv5Ol6ZYZ?{l^DJ9*kA?F;>C=&mGde-Ge
zu&%ZozwK`~r}PGlp2@rw4{|M&=N>DGI##0UQ^xQp;mc-CV=C^>b73&7lL(XQisvP5
zAneiI5hsp%5C}q`Mt?O>0ulp*BY!?=oRBeWyu@oE<`SAw(iFlCiYgP39Z`JN^e;};
zT8MDC)X|+>3hZ}32`tVSgbGd(0<(}KAE*yk^x8zHeku2VmcxPM$kzfIgyn_hgFr11qC7usZIIyliT
zFYA4pA|I>As?u$p0%J$AY)oKt#2nPx+9O#`I1ua7Fl5w0JjIx9ilatT
zJgi6Q+9ab!+F3uNi`^eE+NyQ*>UqyZXkrYL$Af~H%+_kJ0xb`R@lW5|0k~@jMLi#p
zf#p^?KvSXo&Ve6C%hHZ4+|iFe<$pP4F2%2Pb775nW97jReXZE+m(afd<8EDMgfo
z?-o5w3Q)dGyY$9iIUwOQ54*VK(=yL+y{R>&`nP`YGMB9|F7>Hru$MqE|xgz|b&ZOHFtxnaH8%E!jHzVti#JSAKHFtb+}i7_9HviG3f
zrn^8{4cOu6E^?oATb4#cT}bmMy-X~R?z2L4kb=U&RjOeLrvax=CzZ2Az%Q8F*|9BO
z>GGifl9KT0XT$c~P>^*LsNH%xxx-C)TnURy4V?!fLb*JCHBYcRbZjP?td)LVWggR(
zAH9s5=_iI~KJZsWLa5=h^RcJSy%N%*8(`OB{e=bVnSfz)mo-)X5O2*p@AA-1Vz_>b
z(x7$4+w;$C;@dOZcTl^
zYoEuwzJB=>bZRiIY-@EYD>@95NOvXo;14{gX$)aXck@OGJHqv1SH7j>nNGs?38Dj6
z_j!h57@Hn}X_W3tS;Wy45YT3742hP?kcP8$JB3Z;rB;@eZiTNY&0Ubvo=*sI{|sN#
zy~T!}q4QIV!pNPlloHnIIXKLwMZKFim$T<8}sWxaA7KQu{6R&PF
zK%8xuD3#OTSC&;e#H
ze9uQT5U&*T7N&5M;-Ay6t@UTT0c?=m8d1UxI3c1q;hWtNZG0{2@W7rkA@wSy8y@|U
z<{R!FT?shxA!DR>7z2`*}_5m+(5h+&c5fQjn>xHC7w6nz4U~xpfS{
zhn^5F89>k0En6osc#yMDFj5{4
zvu&JMzHly+bl#sM_v)XTzQTZ08!>6ClJVj)1~sA37^C*E#SG*So4$_qOTU{lbW+o?@pHU@e1H4i?Vukx`If$UoU7fa9I`h-8tNFawTP1`&MJWnP;C041z=LBm-D~{*M{=sgT|2O#tysCj
zQ?tchnXv-O(6lwl>Stq`H#sY}+6i9Ku2b`3@^W;_IUD8t+}lbmpJFv7ewz+!=>gq+
z%~zZs&~(sc&|N}@@w?q_S`+&g%Z($K-*Z~8R98Q(5;$aIOURCt;u`?Ppu!?f7I|w7
z{P;9CXP9Im_bmSHc@f#1VI~+D*>7-ajG=&>axr
z6euM&4!r}8`h~ElKu+MWck2S(KTrcyl9#vwwr|bP#necD$(xf2LcRKN-!$5z*d%6&
z0A=yP@w6zo6StVH)2&8-m-2P981Kth%74`>0KMvN;~7DO5jO|GU7}%aRpkqBvi$sB
z;H$uHQGC+}n0SPoWCabx7`bb}FW}~exIlTFq2#tFf0cI31)o?p~z<@-OK8x3hXOd|ud}^qdU?`E5SvBi-WLI120Wfm%Ra0g1
z&T&I(JDE>ryZR}cU?RuB1yqW8JbGm}9S?TTVAvfz
zNc(oApJ0Yo>=1Ez+%u9!9|a2GZ*=Do`7wkXfm3^eZsr}XVX=~RL+p-cn2EvG6b7VH
zgG{7Ljw@@xp0shawg@+W9Z`Ju0@W}q9S9^PZT_1+k7S@m2ciJf5YP#PYL&5A`46*i
zFQ?9N#OlgIjA56N{ZZlRloNeZ?KnXt0b5FA?7NoHEhWt74#M9%ZR~jD)>G!=$gr?RN6a_HDwF5+anUi=F$dB!ZJZGw>x1ewh&C
zRvcL+qvlt%*BEJjIP^<*+*JQX9<9F)l07XRB=i=WW*p7;A@qzMNBIKvony7w%}@2X
z8<+W8khULh@be0MkxZ6)j2Q`>-6TCO;X$%(G!{htE$FFgyfId4H$R}*j(#4YHGseq
zCGK02S}ow)fptHa*W_wsPbk^wam&dYA;=
zGZXs{NOEcMU4^(k7kHXoH`nsK*ek7XvaulM@|XlGn}4Oa1RhjUV$ewpk$1TfPIPLBR
zbc6~>^6A$#Ke2%aeyISo_sEum_p4QZ3s>%t)`x=0I>2rI
z;?C{yT93L<*o^+myO-{mNLZX}0UuaUfH8M>bBQS3w%Iv7S|Wt%xUO0E^_;Gjn&8%2
zS)J--radb_oFS<}GeIz;(q^sNFo3XzywfECIh-|UNbmco{ijTZ<&6P7Y{gR?GLwGr
zB8P-HN{*v@KPmusOE85$jB@6VCvY+A$>>$?WWC(Oyxwew`eJ4#S4qsX@YN2mWDdpH
z*lIRrtur0VW^k=y_z%+-Ulw><>OQQkVR~09uZ34H23eWdz^XKd~G?=j@xO
z_{V-c++bQ`PyhdV>;LcDaq0h5V*nsyc<5l13L}dyKAzM__^?5_l7IZH^nGyUPfXZAg6Fvp0*;$9{-4p!_
zbEEFijF6+qGq@?bc7+wWgjmg|Yf<_4fitb6kTsU+>-3)3Re)0Upv|0$rJbSX8&V}|
z5MGql?NaYxxg)vaSCzcVztT)SehmO&dg3A0x6?ptdx0}^KrvZAg%3pJ0L@=yk=~I3eAE27MM-8nl$6s>m;Nd5w(bIrAul|H~Q^ugyp
z(_1Eb5XHiba8T5#zutr^oj7siXo=v?9xc7qXs;&x&-wl1t0x!4j6@X(ZIQjO8=t4P
zpilJV%c&5&XkhZ5rTOp6H9D+~jg6^6A7WAJ<0ZEZCBZap#<)yZ0fw#sONm{B2-dgX
zFPQzQS2lh+H1)oGyaU%VPBs{3Zr_h;EJ>)3x!vI;iIIO&tAXR2gOxdjS5yqFxHx?J
z&ixC7bje~`)}y!#bwXwndU2f9o}&!tTBFWsY@wL@6l+yqfQz{QIsN{^h{FvZ<_(op
z6kk0oQIcZ9LcM6xq{aGWCL+WtAkbu{^XVwqv7iIZkdS(
zHY!6hNNd9?1|JxBx(r|x_;RVfzm}zO>ci`MAT~p`haiC4g6oMW<~+{V^VckVgW1_i
zdc|WOBpEzAkNVn7>5iJhL&1%SNC~QkNyv{QR*3ctR<|OW?1G~!a}NVR~+WY)LA@FLr)PUt)rfG?uq^+~=+rQaPOd9a7aU
z74K+$dSek7uf^w}I$0kB!+LZw;FG^__bl&H$D`fZzQ`4>0A@6C$q?c@vy{5r=s51Q
zXvzC@wz)EFUp%a1`E1P;6S;SJ6~C0>q%}^5kXo-eHqNw;UWo4(?m@!mxLTsp{*E#D
zaVaGY5$XF202%sh3pIjVaji)+#P0}xJF>d3Y=^fkBt`3QG6o~awEqhL6PjeNT3Om)
z$CmZjnu#>2rKr9|U-@IqfKc7Ts#!
z@H2ma0PorDI<=pJCDA-n_d`>LRdvmb3X8Qh7VUeWVt`$0oapiKb`~&&h~Kf72|UP8
z+)Iu%j>F5;z2zHwtxezjX29=PA^>%tYZ%v1VfWL)bt<{70ZVegjfYY93{TX3$TPW}
zZR+gH@`uNVd9NYuPst4aAYlaRQ^Gue@nw}M9$nv3-pZ}}`SBJ&0;(p;iVjenX;O3*GH#2LRC6D1nS49iQsIu~VP8I{
zpEk>OzgOdDLLnhU)lkypJ;M076!%EU#GkHR?n1vn_0t@`GP>jZYB*BP0rb1vf~X0^
z!gNOgBOvOz)y_b+>WHYVn$?W3`-_jq9!97|DqZ;b;+ohS{TGdGej8o?QgDNqV%Z*s
zf$lbp)jX;uVjCON#C+~32EC1a|F6~aU%TMY`&lPs-)*NMEz9&Gw;6u|ck;{Frvx5C
zkX6jf(9qLGEp#xT^t^M#qi}wcyZ(?9cot*;g8!_A}DfBC$#Fy1Vzwb62`t-|At_gXTU4d#^
zH-IoD0M%6<%84SBLd5ADiSa{jnb>q6Jmh6#YAPsaP?>!>L*5AX=Hi<>=Ru%LpeEA$Iy&(c&qt;#C4!J~Bma%PW&dM|Y(sUjo?+`5
z%-Epl=Um7j6fSgqIkt*!%%a;x5V`pEB}bf4)%sDpPv>o^CaUAPv$rJlx~naFbPtt--L0|S$xkG_Y7zZ
zI61!(3w~Uq@S$7q5xeYDW9h%4?H%m{(Sch~%Bc0s9Pv!WvP1m^JtMjLRNe-QyRx=3
zOe>*>x23p_K4P->hTn&vz}E90Lr+Ns7Fh)J(da>fpsU9g!<_k%9+A^
zgX{4>I22#a=Py&y%!@6r4$O|55Kx+e-)vh^B~^#mqW<1ZeLybbe1QK=Hom-ju1t`}
zxEmg9X-7F-D6$je$>c$Ze6pSl={WGV{G(g*&U15Ddl%@dz&D2|0PR|{QqArF+d=r8
z<3_Pc&h;s}`-eWO4^GCiJAocz7rs0qSN$ox01DGShchuai7?FvjPn2ibB~{NMcXHT
zfbUbq&l`B}i+r&m8wK;tI&D#gx=C}U-$vifCw5Q1@*t(d>-9B7YUbzWI*PixlHdF+
z#C`28NA@A~$YG869YEQNKa(8R_RO6%+W_zZ&x?zIgqWg4L4Q}f$lc4hIa=rBgXKXq
zps6X?uW2YW)4=9pxSvY
zO8%A+?Hc_1!4IL^4Wml$*ksL4srj$>vcGE!0Y3C|`pu~a$8TB`h~{Bp$AKN9YJi;K
zSL(aD?1MPR+@H#9GX0_Bgn*9Mr*PyO(Al(YaEcQ;4q;Ya&Zr{wf5(hDH9qRAW%3YD
zKOS^Q=;4rBO*_7YdGaqIb^?+YeNZ^TP^1Ib+y)q?)`;vprj}aOR4mlTd8bZ&;5X)l
zE<*4_3;$JZ2CVV-;Fw-FNwRW@<{Og4!!fh^%!teRIV{5apg%yE%@Rh7x=ZQD1Nx&D
zuqLin-Ni~K@{Fh_M5CL&nwX}sVWBlt4j1YIOxa6fTc9-+$t@zpt?1rGsV~w>9-m^3
zSq9CRTnB&s1B!hkxxd;ba{LnKvy%Z6uqOqg!(!MvqOv%IrH}A6t
z=+mS9Z$Necg{^%Fg5aKn5-XgzA?D-qC^uEZU8_ANb7NW3W
z&58uB%qi4EFsGB_da>wNO7|W`VZq1jra;XrN%lN5F~KmrcflCI8E{9RT-pnblDiAP21&U8Nxnr$sW`@)+nP>RBlA+
z56coKH9BmAG6c^E_c8-hS(*#7gTYE}>>LsRz*}3u44VgGUE!3TG`5$%Fga*>k&so{
z!8pSicj&hOB@~yiB2360gmW)_G4OKi{`fPMEyd+)ybirmOIQcxUmnehmVE8KxOBSb
z@q0ku+NMRnKr@E}q15CgYVDcNk$apuvU!=Y#!bnJeLP@8xRyNb9k3i-F_4;}4iQi9
z*W}h#+glf?9c7g(e9M=^ySpzGo@HSMB4>1Nu}xhDkaH=PFzpXVNkx8D4)czSbG>1{
zI>JAbJh`&U?!^Q7r&()D_1AQIx2$UDXI-VUT~*Aitgz5}PXkhke10RkMf>}a-GKbE
zX=xv11mm
zWw5`CqYE~6XfjlepXqsp$`+eUW=N0MlyiJZay)%Ki=M2nOKQeUfBqW=p79_S_Nx>X
ziSojvpyj+>$n+yFq^b8Z&JO|^_zW#
z*U_1bxr@;{E=9=8X9Dp5ocR;gulZJa&Ow>jj8AA)kbavdGvHHd%EHU8Er}(dc$9wv
zqe?%Ov?!OX^fPbV-QRa58M=Nfc9f1*>Hj+wM$?BcLQ_yD^<-+x7NdvUMXw*zyTrr{
z{@!-eU4C24aoj>@t-kU?nqnFkT>6P%DygYzs^*z90%?St{nLm3pwko_{j@G|
zg&}=hh$E_wc?{MOGOsT=B#uig+P~m8RAu~W9I=Xd<{)W6jwwOY1u$^$TPE54_MZy+
zyaEzEuU7mWKIz?oNjbA5*meeP-<_9T{c&UV@fGLi0M+^YJe0ZMcNI|YQRUF+o%$Hg
z-V$cIkxoM+LCMw*`!{Y^Nn$W~i2McmBu&?T@xPMEncRTe*wkX#k(Yu^GQvQpMYOYs
z^VIS{HEJ^cja{6gzw*xp*9GHT(^C$TMp9E6wEM`3Dw_UU!GmXS9p6UyF6k~#GZO06>p}ku?A(sJ*+x2
zT#b=yy`aPFl8Si
z^5@RbTlUe@^@gPiKmY2!WRqW!4O<~hSrLb*@&Kehv?&hfO8o~
zCo^K%_hdE;_93$Z1XNnvC4&q-ZHl}dnJe7n$NIZ-R)fDig1Ojxc(k|uc9#zFfjNOA
z&49XOL@@xO?Op1~4D6+b@fOF@zt61vRb^FPiGI71*|PF+aBMMFpGJcIPQV}s7QqvA
zXF%5sOV!h;Sik!#@OP_S_q<|==KJrbLV{kO*7pNSTyJzLL~!iDa~#N=N_@0Dxm;Ib
z)1dh@#bu7Y-_+Bn^HZ-O8-UJ+P)dGFQ%V6V4Y0uo(R$YG4khbV^ds3cyiMMBTAq|r&uKm#c6IiD$2}2!LFvUr1;U&}APM=5e73heFTYrP>M=$L56;Uw
z`W*9t6JF+h>GK)axL1lHJQka<5Cs2mqmqFzT;Updu$3Iq)h0CjxcN?l^MW>VO7vdh
z{4!@hzr#}tZQ%B^2qyFgPM46Xs0o&1;s|+)L}&SYUU~XjXr1cc=P_#>0Wbangm{{0
z&$bv%E5QGk#nvw)*^r)&-0_Tf_6>Fn3mLN&NmgGj&W(HnG+maS6gTOHucQ0loF)o2
zjjnzMfeFZ5JYAQfG4wliSyzRLntwaSjCiy8N-oiw*Fq|6tX%InD`HKER#Sqn1VLPF
zZlsyyo*qe102woPNY)>Lb|JIIq~Z#41W^)8iV?H$_pogQ%qB9*yy|E(&}kRXsZL4B
z=23b?P1%u*8v;R{dVfA*Ttn{ZnkXMslPEIVEVwKpJP0~G)@rJcKbm#+d*Sh=G55NK
z;0F;`YZhL()RCQ%03OJRe3K_3WBGaXZSIe3x!k6!wY^omuRUqQY)Op4lFz2k$a!}u
z)~RL5-6(z)6p*Dka4NC!AJDtIn;E7F6_ASSNBn?73zObfKL6&wuooGs$kZOMDRnhr(QZ3iR`n_g-14
z-)zVl>>QO#Vm~YFFk*TlZViOV#RbOb!R6Y->7I2da;pj0_oD}ouew%0&~-(+nR78Q
zYyHgx#rT`R5AhK6Fx`Thv)sx*C=4Y+$Wt@RqMtv_Db~ilRJ&>5W_Egs+DHN?>v3|a
z`ON}>r{4Zgd-YyhsCx@v+vwFIFi*xBD3Ol`_5j}yX~Md
znui9MF0jeUbD
zg#md7DEu|&zsE-YPoDp0z*SC*50`hT2xNXERI-hh0j0=;v&%kTC-
zAgiZ=XtLsK$bBE?k*#f+=Ef}e3q9NOkKKm>HK2u?Dop}m_zKv1c8J_Jw^GObw=xr_
z^{;*Wl!@Epa~m>*h%shn{d@7yOduge&B3&aHFslOwC;oQj&%QNIkXnu9IDd$+tPOw
z@DeB8qkMZak$jc?n?@I4`RC3C>^n)KJBja}8fmNm?xh#O&vc4Lz6_NzxjEB#=w{<|
z3vyE+JGEfFRu4w#&mY+bU%r3!eb=>tjqnWbxhVi8^hYdFs+QfFNkanGo8L4zJMl`VfA)-Dda4V5x&p}ju57m2rIQ>TmzhXz
z0+jqj91xj9>u4+%m{_tQPo+w^3fMMh8xDj8h4*-0)H#{s%d-C$eHQ23tMS(hXvDhJ
z5c0>*AGNrs+$^OCm`v2{TMpRD(bB*1$;YHzFFcgdIKU(U6)%`=9mi{n^0@(WQ0wyM1V3v5vg?_Y|%z4h%mXVE^K|9O(lbs8=FL;)%?q?jqYW7ZVIPDpkAijwCOLt7hLVnYpJ2O%
zbZ*uJHV&l3-`cm}JxYk@_O&MX%iJPAUKARC?eA<>aIzs!!YJd{HT@S27DpuyDJnA&
zT|#cBD1qrn&W)xgv(FjA3ZZ55eI5P2gJyXdcxukYFH%9KT7c)y&~H16ncHyzQUefy
z)R}cDe4dQ=eua;i;sIUk`usLK(ou-j?7_V56f3ZBdk=>}1pv8*!K=#p9pXgh&(s>l
zNK-vw`h3V%W8826kS7cUw(j4X<%k>9C`71NwIkKC*G({E!M9-l$6MCU>_yA(qnSm(${-q=W?oD+R<4F0e@bD4O!vVKz@rootbL(+K8|^whv2+%IF>Z8Ysvlo3EB_
ztXEfcMbCScvx7~1YTE4#wH#=C#CpW=jPs*m8JW{{Em_8B7iJ)pYk7>(S;C{Nc&0C`
zWqj*Sp;^`E1#!vO=6B5XKQ6Ns`(;scfpV2PiY;M(=yc@?!ntbwsVib>rl5PivB5bZ
zK+NvddDBZ`+3Fpnwk8R!^4H-0z$Z8Xh6<{~fFZNa;GljDIa16(KY>#}pK
z5}(JPZ>3_T;^M1|u~5UyoI^ss25PONSSw9^GsCZuwblDokyl?Zg>-V1%4>s&jv?&g
zq~-z=Qjq?DoZN#3pQ|GRw3eGo!g+F9c_+7PlXEpN%QX?U1mTk2Bkf#Upuo`=-CF=&E*T?{&imGilybf#b7N
z-FJ_AWB1c-7|-5OlR8L`wFa4MR~}_OxuT6J+Ny`60;{YAHr@0o8i(-=3FF6~)^r|4
zJaZn9y1i?4w!x6)9@lR6VuBRp(dO)7a*M_5hfxpV#gp@{a~4crKDA|7@p{RGh!j^7`k3(QtRQ~%SnG1QX$>K8
zMx9WR@