Compare commits

..

5 Commits

Author SHA1 Message Date
d75b468330 流式调用 90% 2025-03-07 19:02:04 +08:00
Akarin~
3fa1be27bc Merge branch 'main' into feat/stream 2025-03-07 18:54:04 +08:00
Akarin~
52d218fb8d Merge branch 'main' into feat/stream 2025-03-07 17:20:00 +08:00
Akarin~
35beecb819 Merge branch 'main' into feat/stream 2025-03-07 17:15:34 +08:00
780df08a65 流式调用 30% 2025-03-07 13:32:30 +08:00
14 changed files with 111 additions and 143 deletions

View File

@@ -1,6 +1,9 @@
name: Deploy VitePress site to Liteyuki PaaS name: Deploy VitePress site to Liteyuki PaaS
on: ["push", "pull_request_target"] on:
push:
branches: [main]
workflow_dispatch:
permissions: permissions:
contents: write contents: write
@@ -25,7 +28,7 @@ jobs:
- name: Setup Python - name: Setup Python
uses: actions/setup-python@v2 uses: actions/setup-python@v2
with: with:
python-version: "3.11" python-version: '3.11'
- name: Setup API markdown - name: Setup API markdown
run: |- run: |-
@@ -49,10 +52,11 @@ jobs:
run: |- run: |-
pnpm run docs:build pnpm run docs:build
- name: "发布" - name: "发布"
run: | run: |
npx -p "@getmeli/cli" meli upload docs/.vitepress/dist \ npx -p "@getmeli/cli" meli upload docs/.vitepress/dist \
--url "https://pages.liteyuki.icu" \ --url "https://meli.liteyuki.icu" \
--site "$MELI_SITE" \ --site "$MELI_SITE" \
--token "$MELI_TOKEN" \ --token "$MELI_TOKEN" \
--release "$GITHUB_SHA" --release "$GITHUB_SHA"

2
CNAME
View File

@@ -1 +1 @@
marshoai-docs.pages.liteyuki.icu marshoai-docs.meli.liteyuki.icu

View File

@@ -1,6 +1,6 @@
<!--suppress LongLine --> <!--suppress LongLine -->
<div align="center"> <div align="center">
<a href="https://marshoai-docs.pages.liteyuki.icu"><img src="https://marshoai-docs.pages.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a> <a href="https://marshoai-docs.meli.liteyuki.icu"><img src="https://marshoai-docs.meli.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
<br> <br>
</div> </div>
@@ -48,7 +48,7 @@ _谁不喜欢回复消息快又可爱的猫娘呢_
## 😼 使用 ## 😼 使用
请查看[使用文档](https://marshoai-docs.pages.liteyuki.icu/start/use.html) 请查看[使用文档](https://marshoai-docs.meli.liteyuki.icu/start/use)
## ❤ 鸣谢&版权说明 ## ❤ 鸣谢&版权说明

View File

@@ -1,6 +1,6 @@
<!--suppress LongLine --> <!--suppress LongLine -->
<div align="center"> <div align="center">
<a href="https://marshoai-docs.pages.liteyuki.icu"><img src="https://marshoai-docs.pages.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a> <a href="https://marshoai-docs.meli.liteyuki.icu"><img src="https://marshoai-docs.meli.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
<br> <br>
</div> </div>
@@ -48,7 +48,7 @@ Plugin internally installed the catgirl character of Marsho, is able to have a c
- 🐾 Play! I like play with friends! - 🐾 Play! I like play with friends!
## 😼 Usage ## 😼 Usage
Please read [Documentation](https://marshoai-docs.pages.liteyuki.icu/start/use.html) Please read [Documentation](https://marshoai-docs.meli.liteyuki.icu/start/install)
## ❤ Thanks&Copyright ## ❤ Thanks&Copyright
This project uses the following code from other projects: This project uses the following code from other projects:

View File

@@ -8,13 +8,12 @@ import { generateSidebar } from 'vitepress-sidebar'
// https://vitepress.dev/reference/site-config // https://vitepress.dev/reference/site-config
export default defineConfig({ export default defineConfig({
head: [ head: [
["script", { src: "https://cdn.liteyuki.icu/js/liteyuki_footer.js" }],
['link', { rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' }], ['link', { rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' }],
], ],
rewrites: { rewrites: {
[`${defaultLang}/:rest*`]: ":rest*", [`${defaultLang}/:rest*`]: ":rest*",
}, },
cleanUrls: false, cleanUrls: true,
themeConfig: { themeConfig: {
// https://vitepress.dev/reference/default-theme-config // https://vitepress.dev/reference/default-theme-config
logo: { logo: {

View File

@@ -65,7 +65,7 @@ When nonebot linked to OneBot v11 adapter, can receive double click and response When nonebot linked to OneBot v11 adapter, can receive double click and response
MarshoTools is a feature added in `v0.5.0`, support loading external function library to provide Function Call for Marsho. MarshoTools is a feature added in `v0.5.0`, support loading external function library to provide Function Call for Marsho.
## 🧩 Marsho Plugin ## 🧩 Marsho Plugin
Marsho Plugin is a feature added in `v1.0.0`, replacing the old MarshoTools feature. [Documentation](https://marshoai-docs.pages.liteyuki.icu/dev/extension) Marsho Plugin is a feature added in `v1.0.0`, replacing the old MarshoTools feature. [Documentation](https://marshoai-docs.meli.liteyuki.icu/dev/extension)
## 👍 Praise list ## 👍 Praise list

View File

@@ -68,7 +68,7 @@ GitHub Models API 的限制较多,不建议使用,建议通过修改`MARSHOA
## 🧩 小棉插件 ## 🧩 小棉插件
小棉插件是`v1.0.0`的新增功能,替代旧的小棉工具功能。[使用文档](https://marshoai-docs.pages.liteyuki.icu/dev/extension) 小棉插件是`v1.0.0`的新增功能,替代旧的小棉工具功能。[使用文档](https://marshoai-docs.meli.liteyuki.icu/dev/extension)
## 👍 夸赞名单 ## 👍 夸赞名单

View File

@@ -37,7 +37,7 @@ OPENAI_NEW_MODELS: list = [
INTRODUCTION: str = f"""MarshoAI-NoneBot by LiteyukiStudio INTRODUCTION: str = f"""MarshoAI-NoneBot by LiteyukiStudio
你好喵~我是一只可爱的猫娘AI名叫小棉~🐾! 你好喵~我是一只可爱的猫娘AI名叫小棉~🐾!
我的主页在这里哦~↓↓↓ 我的主页在这里哦~↓↓↓
https://marshoai-docs.pages.liteyuki.icu https://marshoai-docs.meli.liteyuki.icu
※ 使用 「{config.marshoai_default_name}.status」命令获取状态信息。 ※ 使用 「{config.marshoai_default_name}.status」命令获取状态信息。
※ 使用「{config.marshoai_default_name}.help」命令获取使用说明。""" ※ 使用「{config.marshoai_default_name}.help」命令获取使用说明。"""

View File

@@ -17,9 +17,10 @@ from nonebot.matcher import (
current_event, current_event,
current_matcher, current_matcher,
) )
from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg, get_message_id, get_target from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
from openai import AsyncOpenAI, AsyncStream from openai import AsyncOpenAI, AsyncStream
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
from openai.types.chat.chat_completion import Choice
from .config import config from .config import config
from .constants import SUPPORT_IMAGE_MODELS from .constants import SUPPORT_IMAGE_MODELS
@@ -35,7 +36,6 @@ from .util import (
make_chat_openai, make_chat_openai,
parse_richtext, parse_richtext,
) )
from .utils.request import process_chat_stream
class MarshoHandler: class MarshoHandler:
@@ -50,8 +50,8 @@ class MarshoHandler:
self.event: Event = current_event.get() self.event: Event = current_event.get()
# self.state: T_State = current_handler.get().state # self.state: T_State = current_handler.get().state
self.matcher: Matcher = current_matcher.get() self.matcher: Matcher = current_matcher.get()
self.message_id: str = get_message_id(self.event) self.message_id: str = UniMessage.get_message_id(self.event)
self.target = get_target(self.event) self.target = UniMessage.get_target(self.event)
async def process_user_input( async def process_user_input(
self, user_input: UniMsg, model_name: str self, user_input: UniMsg, model_name: str
@@ -103,7 +103,7 @@ class MarshoHandler:
处理单条聊天 处理单条聊天
""" """
context_msg = await get_prompt(model_name) + ( context_msg = get_prompt(model_name) + (
self.context.build(self.target.id, self.target.private) self.context.build(self.target.id, self.target.private)
) )
response = await make_chat_openai( response = await make_chat_openai(
@@ -117,10 +117,10 @@ class MarshoHandler:
async def handle_function_call( async def handle_function_call(
self, self,
completion: Union[ChatCompletion], completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
user_message: Union[str, list], user_message: Union[str, list],
model_name: str, model_name: str,
tools_list: list | None = None, tools_list: list,
): ):
# function call # function call
# 需要获取额外信息,调用函数工具 # 需要获取额外信息,调用函数工具
@@ -188,7 +188,7 @@ class MarshoHandler:
self, self,
user_message: Union[str, list], user_message: Union[str, list],
model_name: str, model_name: str,
tools_list: list | None = None, tools_list: list,
stream: bool = False, stream: bool = False,
tool_message: Optional[list] = None, tool_message: Optional[list] = None,
) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]: ) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]:
@@ -210,7 +210,10 @@ class MarshoHandler:
tools_list=tools_list, tools_list=tools_list,
tool_message=tool_message, tool_message=tool_message,
) )
choice = response.choices[0] # type: ignore if isinstance(response, ChatCompletion):
choice = response.choices[0]
else:
raise ValueError("Unexpected response type")
# Sprint(choice) # Sprint(choice)
# 当tool_calls非空时将finish_reason设置为TOOL_CALLS # 当tool_calls非空时将finish_reason设置为TOOL_CALLS
if choice.message.tool_calls is not None and config.marshoai_fix_toolcalls: if choice.message.tool_calls is not None and config.marshoai_fix_toolcalls:
@@ -257,9 +260,9 @@ class MarshoHandler:
self, self,
user_message: Union[str, list], user_message: Union[str, list],
model_name: str, model_name: str,
tools_list: list | None = None, tools_list: list,
tools_message: Optional[list] = None, tools_message: Optional[list] = None,
) -> ChatCompletion: ) -> Union[ChatCompletion, None]:
""" """
处理流式请求 处理流式请求
""" """
@@ -272,6 +275,54 @@ class MarshoHandler:
) )
if isinstance(response, AsyncStream): if isinstance(response, AsyncStream):
return await process_chat_stream(response) reasoning_contents = ""
else: answer_contents = ""
raise TypeError("Unexpected response type for stream request") last_chunk = None
is_first_token_appeared = False
is_answering = False
async for chunk in response:
last_chunk = chunk
# print(chunk)
if not is_first_token_appeared:
logger.debug(f"{chunk.id}: 第一个 token 已出现")
is_first_token_appeared = True
if not chunk.choices:
logger.info("Usage:", chunk.usage)
else:
delta = chunk.choices[0].delta
if (
hasattr(delta, "reasoning_content")
and delta.reasoning_content is not None
):
reasoning_contents += delta.reasoning_content
else:
if not is_answering:
logger.debug(
f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
)
is_answering = True
if delta.content is not None:
answer_contents += delta.content
# print(last_chunk)
# 创建新的 ChatCompletion 对象
if last_chunk and last_chunk.choices:
message = ChatCompletionMessage(
content=answer_contents,
role="assistant",
tool_calls=last_chunk.choices[0].delta.tool_calls, # type: ignore
)
choice = Choice(
finish_reason=last_chunk.choices[0].finish_reason, # type: ignore
index=last_chunk.choices[0].index,
message=message,
)
return ChatCompletion(
id=last_chunk.id,
choices=[choice],
created=last_chunk.created,
model=last_chunk.model,
system_fingerprint=last_chunk.system_fingerprint,
object="chat.completion",
usage=last_chunk.usage,
)
return None

View File

@@ -15,14 +15,7 @@ from nonebot.params import CommandArg
from nonebot.permission import SUPERUSER from nonebot.permission import SUPERUSER
from nonebot.rule import to_me from nonebot.rule import to_me
from nonebot.typing import T_State from nonebot.typing import T_State
from nonebot_plugin_alconna import ( from nonebot_plugin_alconna import MsgTarget, UniMessage, UniMsg, on_alconna
Emoji,
MsgTarget,
UniMessage,
UniMsg,
message_reaction,
on_alconna,
)
from .config import config from .config import config
from .constants import INTRODUCTION, SUPPORT_IMAGE_MODELS from .constants import INTRODUCTION, SUPPORT_IMAGE_MODELS
@@ -32,7 +25,6 @@ from .instances import client, context, model_name, target_list, tools
from .metadata import metadata from .metadata import metadata
from .plugin.func_call.caller import get_function_calls from .plugin.func_call.caller import get_function_calls
from .util import * from .util import *
from .utils.request import process_chat_stream
async def at_enable(): async def at_enable():
@@ -234,7 +226,6 @@ async def marsho(
if not text: if not text:
# 发送说明 # 发送说明
# await UniMessage(metadata.usage + "\n当前使用的模型" + model_name).send() # await UniMessage(metadata.usage + "\n当前使用的模型" + model_name).send()
await message_reaction(Emoji("38"))
await marsho_cmd.finish(INTRODUCTION) await marsho_cmd.finish(INTRODUCTION)
backup_context = await get_backup_context(target.id, target.private) backup_context = await get_backup_context(target.id, target.private)
if backup_context: if backup_context:
@@ -265,7 +256,6 @@ async def marsho(
map(lambda v: v.data(), get_function_calls().values()) map(lambda v: v.data(), get_function_calls().values())
) )
logger.info(f"正在获取回答,模型:{model_name}") logger.info(f"正在获取回答,模型:{model_name}")
await message_reaction(Emoji("66"))
# logger.info(f"上下文:{context_msg}") # logger.info(f"上下文:{context_msg}")
response = await handler.handle_common_chat( response = await handler.handle_common_chat(
usermsg, model_name, tools_lists, config.marshoai_stream usermsg, model_name, tools_lists, config.marshoai_stream
@@ -292,23 +282,19 @@ with contextlib.suppress(ImportError): # 优化先不做()
async def poke(event: Event): async def poke(event: Event):
user_nickname = await get_nickname_by_user_id(event.get_user_id()) user_nickname = await get_nickname_by_user_id(event.get_user_id())
usermsg = await get_prompt(model_name) + [
UserMessage(content=f"*{user_nickname}{config.marshoai_poke_suffix}"),
]
try: try:
if config.marshoai_poke_suffix != "": if config.marshoai_poke_suffix != "":
logger.info(f"收到戳一戳,用户昵称:{user_nickname}") logger.info(f"收到戳一戳,用户昵称:{user_nickname}")
response = await make_chat_openai(
pre_response = await make_chat_openai(
client=client, client=client,
model_name=model_name, model_name=model_name,
msg=usermsg, msg=get_prompt(model_name)
stream=config.marshoai_stream, + [
UserMessage(
content=f"*{user_nickname}{config.marshoai_poke_suffix}"
),
],
) )
if isinstance(pre_response, AsyncStream):
response = await process_chat_stream(pre_response)
else:
response = pre_response
choice = response.choices[0] # type: ignore choice = response.choices[0] # type: ignore
if choice.finish_reason == CompletionsFinishReason.STOPPED: if choice.finish_reason == CompletionsFinishReason.STOPPED:
content = extract_content_and_think(choice.message)[0] content = extract_content_and_think(choice.message)[0]

View File

@@ -2,7 +2,6 @@ import base64
import json import json
import mimetypes import mimetypes
import re import re
import ssl
import uuid import uuid
from typing import Any, Dict, List, Optional, Union from typing import Any, Dict, List, Optional, Union
@@ -18,7 +17,7 @@ from nonebot_plugin_alconna import Text as TextMsg
from nonebot_plugin_alconna import UniMessage from nonebot_plugin_alconna import UniMessage
from openai import AsyncOpenAI, AsyncStream, NotGiven from openai import AsyncOpenAI, AsyncStream, NotGiven
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
from zhDateTime import DateTime # type: ignore from zhDateTime import DateTime
from ._types import DeveloperMessage from ._types import DeveloperMessage
from .cache.decos import * from .cache.decos import *
@@ -59,8 +58,6 @@ _praises_init_data = {
""" """
初始夸赞名单之数据 初始夸赞名单之数据
""" """
_ssl_context = ssl.create_default_context()
_ssl_context.set_ciphers("DEFAULT")
async def get_image_raw_and_type( async def get_image_raw_and_type(
@@ -77,7 +74,7 @@ async def get_image_raw_and_type(
tuple[bytes, str]: 图片二进制数据, 图片MIME格式 tuple[bytes, str]: 图片二进制数据, 图片MIME格式
""" """
async with httpx.AsyncClient(verify=_ssl_context) as client: async with httpx.AsyncClient() as client:
response = await client.get(url, headers=_browser_headers, timeout=timeout) response = await client.get(url, headers=_browser_headers, timeout=timeout)
if response.status_code == 200: if response.status_code == 200:
# 获取图片数据 # 获取图片数据
@@ -101,7 +98,9 @@ async def get_image_b64(url: str, timeout: int = 10) -> Optional[str]:
return: 图片base64编码 return: 图片base64编码
""" """
if data_type := await get_image_raw_and_type(url, timeout): if data_type := await get_image_raw_and_type(
url.replace("https://", "http://"), timeout
):
# image_format = content_type.split("/")[1] if content_type else "jpeg" # image_format = content_type.split("/")[1] if content_type else "jpeg"
base64_image = base64.b64encode(data_type[0]).decode("utf-8") base64_image = base64.b64encode(data_type[0]).decode("utf-8")
data_url = "data:{};base64,{}".format(data_type[1], base64_image) data_url = "data:{};base64,{}".format(data_type[1], base64_image)
@@ -137,15 +136,15 @@ async def make_chat_openai(
@from_cache("praises") @from_cache("praises")
async def get_praises(): def get_praises():
praises_file = store.get_plugin_data_file( praises_file = store.get_plugin_data_file(
"praises.json" "praises.json"
) # 夸赞名单文件使用localstore存储 ) # 夸赞名单文件使用localstore存储
if not praises_file.exists(): if not praises_file.exists():
async with aiofiles.open(praises_file, "w", encoding="utf-8") as f: with open(praises_file, "w", encoding="utf-8") as f:
await f.write(json.dumps(_praises_init_data, ensure_ascii=False, indent=4)) json.dump(_praises_init_data, f, ensure_ascii=False, indent=4)
async with aiofiles.open(praises_file, "r", encoding="utf-8") as f: with open(praises_file, "r", encoding="utf-8") as f:
data = json.loads(await f.read()) data = json.load(f)
praises_json = data praises_json = data
return praises_json return praises_json
@@ -161,8 +160,8 @@ async def refresh_praises_json():
return data return data
async def build_praises() -> str: def build_praises() -> str:
praises = await get_praises() praises = get_praises()
result = ["你喜欢以下几个人物,他们有各自的优点:"] result = ["你喜欢以下几个人物,他们有各自的优点:"]
for item in praises["like"]: for item in praises["like"]:
result.append(f"名字:{item['name']},优点:{item['advantages']}") result.append(f"名字:{item['name']},优点:{item['advantages']}")
@@ -214,8 +213,8 @@ async def set_nickname(user_id: str, name: str):
data[user_id] = name data[user_id] = name
if name == "" and user_id in data: if name == "" and user_id in data:
del data[user_id] del data[user_id]
async with aiofiles.open(filename, "w", encoding="utf-8") as f: with open(filename, "w", encoding="utf-8") as f:
await f.write(json.dumps(data, ensure_ascii=False, indent=4)) json.dump(data, f, ensure_ascii=False, indent=4)
return data return data
@@ -238,11 +237,11 @@ async def refresh_nickname_json():
logger.error("刷新 nickname_json 表错误:无法载入 nickname.json 文件") logger.error("刷新 nickname_json 表错误:无法载入 nickname.json 文件")
async def get_prompt(model: str) -> List[Dict[str, Any]]: def get_prompt(model: str) -> List[Dict[str, Any]]:
"""获取系统提示词""" """获取系统提示词"""
prompts = config.marshoai_additional_prompt prompts = config.marshoai_additional_prompt
if config.marshoai_enable_praises: if config.marshoai_enable_praises:
praises_prompt = await build_praises() praises_prompt = build_praises()
prompts += praises_prompt prompts += praises_prompt
if config.marshoai_enable_time_prompt: if config.marshoai_enable_time_prompt:

View File

@@ -1,71 +0,0 @@
from nonebot.log import logger
from openai import AsyncStream
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
from openai.types.chat.chat_completion import Choice
async def process_chat_stream(
stream: AsyncStream[ChatCompletionChunk],
) -> ChatCompletion:
reasoning_contents = ""
answer_contents = ""
last_chunk = None
is_first_token_appeared = False
is_answering = False
async for chunk in stream:
last_chunk = chunk
# print(chunk)
if not is_first_token_appeared:
logger.debug(f"{chunk.id}: 第一个 token 已出现")
is_first_token_appeared = True
if not chunk.choices:
logger.info("Usage:", chunk.usage)
else:
delta = chunk.choices[0].delta
if (
hasattr(delta, "reasoning_content")
and delta.reasoning_content is not None
):
reasoning_contents += delta.reasoning_content
else:
if not is_answering:
logger.debug(
f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
)
is_answering = True
if delta.content is not None:
answer_contents += delta.content
# print(last_chunk)
# 创建新的 ChatCompletion 对象
if last_chunk and last_chunk.choices:
message = ChatCompletionMessage(
content=answer_contents,
role="assistant",
tool_calls=last_chunk.choices[0].delta.tool_calls, # type: ignore
)
if reasoning_contents != "":
setattr(message, "reasoning_content", reasoning_contents)
choice = Choice(
finish_reason=last_chunk.choices[0].finish_reason, # type: ignore
index=last_chunk.choices[0].index,
message=message,
)
return ChatCompletion(
id=last_chunk.id,
choices=[choice],
created=last_chunk.created,
model=last_chunk.model,
system_fingerprint=last_chunk.system_fingerprint,
object="chat.completion",
usage=last_chunk.usage,
)
else:
return ChatCompletion(
id="",
choices=[],
created=0,
model="",
system_fingerprint="",
object="chat.completion",
usage=None,
)

8
pdm.lock generated
View File

@@ -5,7 +5,7 @@
groups = ["default", "dev", "test"] groups = ["default", "dev", "test"]
strategy = ["inherit_metadata"] strategy = ["inherit_metadata"]
lock_version = "4.5.0" lock_version = "4.5.0"
content_hash = "sha256:9dd3edfe69c332deac360af2685358e82c5dac0870900668534fc6f1d34040f8" content_hash = "sha256:d7ab3d9ca825de512d4f87ec846f7fddcf3d5796a7c9562e60c8c7d39c058817"
[[metadata.targets]] [[metadata.targets]]
requires_python = "~=3.10" requires_python = "~=3.10"
@@ -1485,7 +1485,7 @@ files = [
[[package]] [[package]]
name = "nonebot-plugin-alconna" name = "nonebot-plugin-alconna"
version = "0.57.0" version = "0.54.1"
requires_python = ">=3.9" requires_python = ">=3.9"
summary = "Alconna Adapter for Nonebot" summary = "Alconna Adapter for Nonebot"
groups = ["default"] groups = ["default"]
@@ -1499,8 +1499,8 @@ dependencies = [
"tarina<0.7,>=0.6.8", "tarina<0.7,>=0.6.8",
] ]
files = [ files = [
{file = "nonebot_plugin_alconna-0.57.0-py3-none-any.whl", hash = "sha256:6c4bcce1a9aa176244b4c011b19b1cea00269c4c6794cd4e90d8dd7990ec3ec9"}, {file = "nonebot_plugin_alconna-0.54.1-py3-none-any.whl", hash = "sha256:4edb4b081cd64ce37717c7a92d31aadd2cf287a5a0adc2ac86ed82d9bcad5048"},
{file = "nonebot_plugin_alconna-0.57.0.tar.gz", hash = "sha256:7a9a4bf373f3f6836611dbde1a0917b84441a534dd6f2b20dae3ba6fff142858"}, {file = "nonebot_plugin_alconna-0.54.1.tar.gz", hash = "sha256:66fae03120b8eff25bb0027d65f149e399aa6f73c7585ebdd388d1904cecdeee"},
] ]
[[package]] [[package]]

View File

@@ -10,7 +10,7 @@ authors = [
] ]
dependencies = [ dependencies = [
"nonebot2>=2.4.0", "nonebot2>=2.4.0",
"nonebot-plugin-alconna>=0.57.0", "nonebot-plugin-alconna>=0.48.0",
"nonebot-plugin-localstore>=0.7.1", "nonebot-plugin-localstore>=0.7.1",
"zhDatetime>=2.0.0", "zhDatetime>=2.0.0",
"aiohttp>=3.9", "aiohttp>=3.9",
@@ -34,7 +34,7 @@ dependencies = [
license = { text = "MIT, Mulan PSL v2" } license = { text = "MIT, Mulan PSL v2" }
[project.urls] [project.urls]
Homepage = "https://marshoai-docs.pages.liteyuki.icu/" Homepage = "https://marshoai-docs.meli.liteyuki.icu/"
[tool.nonebot] [tool.nonebot]