mirror of https://github.com/LiteyukiStudio/nonebot-plugin-marshoai.git
synced 2025-06-19 21:27:45 +00:00

Compare commits

5 Commits: dc6786deab, 6bfa2c39a1, 2ce29e45e7, 55f9c427b7, 5768b95b09
.github/workflows/docs-build.yml (vendored, 2 lines changed)
@@ -52,7 +52,7 @@ jobs:
       - name: "发布"
        run: |
          npx -p "@getmeli/cli" meli upload docs/.vitepress/dist \
-            --url "https://pages.liteyuki.icu" \
+            --url "https://dash.apage.dev" \
            --site "$MELI_SITE" \
            --token "$MELI_TOKEN" \
            --release "$GITHUB_SHA"
@@ -26,17 +26,19 @@ from nonebot.plugin import require

 require("nonebot_plugin_alconna")
 require("nonebot_plugin_localstore")
+require("nonebot_plugin_argot")

 import nonebot_plugin_localstore as store  # type: ignore
 from nonebot import get_driver, logger  # type: ignore

 from .config import config

-# from .hunyuan import *
 from .dev import *
 from .marsho import *
 from .metadata import metadata

+# from .hunyuan import *
+
 __author__ = "Asankilp"
 __plugin_meta__ = metadata

@@ -1,15 +1,16 @@
 import os
 from pathlib import Path

-from nonebot import get_driver, logger, require
+from nonebot import get_driver, logger, on_command, require
 from nonebot.adapters import Bot, Event
 from nonebot.matcher import Matcher
 from nonebot.typing import T_State
+from nonebot_plugin_argot import add_argot, get_message_id

 from nonebot_plugin_marshoai.plugin.load import reload_plugin

 from .config import config
-from .marsho import context
+from .instances import context
 from .plugin.func_call.models import SessionContext

 require("nonebot_plugin_alconna")
@@ -48,6 +49,21 @@ function_call = on_alconna(
     permission=SUPERUSER,
 )

+argot_test = on_command("argot", permission=SUPERUSER)
+
+
+@argot_test.handle()
+async def _():
+    await argot_test.send(
+        "aa",
+        argot={
+            "name": "test",
+            "command": "test",
+            "segment": f"{os.getcwd()}",
+            "expired_at": 1000,
+        },
+    )
+

 @function_call.assign("list")
 async def list_functions():
@@ -1,4 +1,5 @@
 import json
+from datetime import timedelta
 from typing import Optional, Tuple, Union

 from azure.ai.inference.models import (
@@ -17,10 +18,15 @@ from nonebot.matcher import (
     current_event,
     current_matcher,
 )
-from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
+from nonebot_plugin_alconna.uniseg import (
+    Text,
+    UniMessage,
+    UniMsg,
+    get_target,
+)
+from nonebot_plugin_argot import Argot  # type: ignore
 from openai import AsyncOpenAI, AsyncStream
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
-from openai.types.chat.chat_completion import Choice

 from .config import config
 from .constants import SUPPORT_IMAGE_MODELS
@@ -36,6 +42,7 @@ from .util import (
     make_chat_openai,
     parse_richtext,
 )
+from .utils.processor import process_chat_stream, process_completion_to_details


 class MarshoHandler:
@@ -51,7 +58,7 @@ class MarshoHandler:
         # self.state: T_State = current_handler.get().state
         self.matcher: Matcher = current_matcher.get()
         self.message_id: str = UniMessage.get_message_id(self.event)
-        self.target = UniMessage.get_target(self.event)
+        self.target = get_target(self.event)

     async def process_user_input(
         self, user_input: UniMsg, model_name: str
@@ -117,10 +124,10 @@ class MarshoHandler:

     async def handle_function_call(
         self,
-        completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
+        completion: Union[ChatCompletion],
         user_message: Union[str, list],
         model_name: str,
-        tools_list: list,
+        tools_list: list | None = None,
     ):
         # function call
         # 需要获取额外信息,调用函数工具
@@ -188,7 +195,7 @@ class MarshoHandler:
         self,
         user_message: Union[str, list],
         model_name: str,
-        tools_list: list,
+        tools_list: list | None = None,
         stream: bool = False,
         tool_message: Optional[list] = None,
     ) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]:
@@ -230,12 +237,28 @@ class MarshoHandler:
             target_list.append([self.target.id, self.target.private])

             # 对话成功发送消息
+            send_message = UniMessage()
             if config.marshoai_enable_richtext_parse:
-                await (await parse_richtext(str(choice_msg_content))).send(
-                    reply_to=True
-                )
+                send_message = await parse_richtext(str(choice_msg_content))
             else:
-                await UniMessage(str(choice_msg_content)).send(reply_to=True)
+                send_message = UniMessage(str(choice_msg_content))
+            send_message.append(
+                Argot(
+                    "detail",
+                    Text(await process_completion_to_details(response)),
+                    command="detail",
+                    expired_at=timedelta(minutes=5),
+                )
+            )
+            # send_message.append(
+            #     Argot(
+            #         "debug",
+            #         Text(str(response)),
+            #         command=f"debug",
+            #         expired_at=timedelta(minutes=5),
+            #     )
+            # )
+            await send_message.send(reply_to=True)
             return UserMessage(content=user_message), choice_msg_after
         elif choice.finish_reason == CompletionsFinishReason.CONTENT_FILTERED:

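Note: the change above boils down to appending a hidden, expiring Argot segment to the outgoing UniMessage before sending it. A minimal standalone sketch of that pattern, using only the calls that appear in the hunk; `content` and `details` are placeholder strings, not values from the plugin:

```python
from datetime import timedelta

from nonebot_plugin_alconna.uniseg import Text, UniMessage
from nonebot_plugin_argot import Argot  # type: ignore


async def send_with_detail(content: str, details: str) -> None:
    # Visible reply text first, then a hidden "detail" Argot segment
    # that expires after five minutes, mirroring the handler change above.
    message = UniMessage(content)
    message.append(
        Argot(
            "detail",
            Text(details),
            command="detail",
            expired_at=timedelta(minutes=5),
        )
    )
    await message.send(reply_to=True)
```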
@@ -257,9 +280,9 @@ class MarshoHandler:
         self,
         user_message: Union[str, list],
         model_name: str,
-        tools_list: list,
+        tools_list: list | None = None,
         tools_message: Optional[list] = None,
-    ) -> Union[ChatCompletion, None]:
+    ) -> ChatCompletion:
         """
         处理流式请求
         """
@@ -272,56 +295,6 @@ class MarshoHandler:
         )

         if isinstance(response, AsyncStream):
-            reasoning_contents = ""
-            answer_contents = ""
-            last_chunk = None
-            is_first_token_appeared = False
-            is_answering = False
-            async for chunk in response:
-                last_chunk = chunk
-                # print(chunk)
-                if not is_first_token_appeared:
-                    logger.debug(f"{chunk.id}: 第一个 token 已出现")
-                    is_first_token_appeared = True
-                if not chunk.choices:
-                    logger.info("Usage:", chunk.usage)
-                else:
-                    delta = chunk.choices[0].delta
-                    if (
-                        hasattr(delta, "reasoning_content")
-                        and delta.reasoning_content is not None
-                    ):
-                        reasoning_contents += delta.reasoning_content
-                    else:
-                        if not is_answering:
-                            logger.debug(
-                                f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
-                            )
-                            is_answering = True
-                        if delta.content is not None:
-                            answer_contents += delta.content
-            # print(last_chunk)
-            # 创建新的 ChatCompletion 对象
-            if last_chunk and last_chunk.choices:
-                message = ChatCompletionMessage(
-                    content=answer_contents,
-                    role="assistant",
-                    tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
-                )
-                if reasoning_contents != "":
-                    setattr(message, "reasoning_content", reasoning_contents)
-                choice = Choice(
-                    finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
-                    index=last_chunk.choices[0].index,
-                    message=message,
-                )
-                return ChatCompletion(
-                    id=last_chunk.id,
-                    choices=[choice],
-                    created=last_chunk.created,
-                    model=last_chunk.model,
-                    system_fingerprint=last_chunk.system_fingerprint,
-                    object="chat.completion",
-                    usage=last_chunk.usage,
-                )
-            return None
+            return await process_chat_stream(response)
+        else:
+            raise TypeError("Unexpected response type for stream request")
@@ -15,7 +15,15 @@ from nonebot.params import CommandArg
 from nonebot.permission import SUPERUSER
 from nonebot.rule import to_me
 from nonebot.typing import T_State
-from nonebot_plugin_alconna import MsgTarget, UniMessage, UniMsg, on_alconna
+from nonebot_plugin_alconna import (
+    Emoji,
+    MsgTarget,
+    UniMessage,
+    UniMsg,
+    message_reaction,
+    on_alconna,
+)
+from nonebot_plugin_argot.extension import ArgotExtension  # type: ignore

 from .config import config
 from .constants import INTRODUCTION, SUPPORT_IMAGE_MODELS
@@ -25,6 +33,7 @@ from .instances import client, context, model_name, target_list, tools
 from .metadata import metadata
 from .plugin.func_call.caller import get_function_calls
 from .util import *
+from .utils.processor import process_chat_stream


 async def at_enable():
@@ -55,6 +64,7 @@ marsho_cmd = on_alconna(
     aliases=tuple(config.marshoai_aliases),
     priority=96,
     block=True,
+    extensions=[ArgotExtension()],
 )
 resetmem_cmd = on_alconna(
     Alconna(
@@ -226,6 +236,7 @@ async def marsho(
     if not text:
         # 发送说明
         # await UniMessage(metadata.usage + "\n当前使用的模型:" + model_name).send()
+        await message_reaction(Emoji("38"))
         await marsho_cmd.finish(INTRODUCTION)
     backup_context = await get_backup_context(target.id, target.private)
     if backup_context:
@@ -256,6 +267,7 @@ async def marsho(
         map(lambda v: v.data(), get_function_calls().values())
     )
     logger.info(f"正在获取回答,模型:{model_name}")
+    await message_reaction(Emoji("66"))
     # logger.info(f"上下文:{context_msg}")
     response = await handler.handle_common_chat(
         usermsg, model_name, tools_lists, config.marshoai_stream
@@ -282,19 +294,23 @@ with contextlib.suppress(ImportError):  # 优化先不做()
     async def poke(event: Event):

         user_nickname = await get_nickname_by_user_id(event.get_user_id())
+        usermsg = await get_prompt(model_name) + [
+            UserMessage(content=f"*{user_nickname}{config.marshoai_poke_suffix}"),
+        ]
         try:
             if config.marshoai_poke_suffix != "":
                 logger.info(f"收到戳一戳,用户昵称:{user_nickname}")
-                response = await make_chat_openai(
+                pre_response = await make_chat_openai(
                     client=client,
                     model_name=model_name,
-                    msg=await get_prompt(model_name)
-                    + [
-                        UserMessage(
-                            content=f"*{user_nickname}{config.marshoai_poke_suffix}"
-                        ),
-                    ],
+                    msg=usermsg,
+                    stream=config.marshoai_stream,
                 )
+                if isinstance(pre_response, AsyncStream):
+                    response = await process_chat_stream(pre_response)
+                else:
+                    response = pre_response
                 choice = response.choices[0]  # type: ignore
                 if choice.finish_reason == CompletionsFinishReason.STOPPED:
                     content = extract_content_and_think(choice.message)[0]
@@ -7,6 +7,7 @@ import sys
 import traceback

 from nonebot import logger
+from typing_extensions import deprecated

 from .config import config

@@ -73,6 +74,7 @@ class MarshoContext:
         return self._get_target_dict(is_private).setdefault(target_id, [])


+@deprecated("小棉工具已弃用,无法正常调用")
 class MarshoTools:
     """
     Marsho 的工具类
@@ -18,7 +18,7 @@ from nonebot_plugin_alconna import Text as TextMsg
 from nonebot_plugin_alconna import UniMessage
 from openai import AsyncOpenAI, AsyncStream, NotGiven
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
-from zhDateTime import DateTime
+from zhDateTime import DateTime  # type: ignore

 from ._types import DeveloperMessage
 from .cache.decos import *
nonebot_plugin_marshoai/utils/processor.py (new file, 87 lines)
@@ -0,0 +1,87 @@
+from nonebot.log import logger
+from openai import AsyncStream
+from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
+from openai.types.chat.chat_completion import Choice
+
+
+async def process_chat_stream(
+    stream: AsyncStream[ChatCompletionChunk],
+) -> ChatCompletion:
+    reasoning_contents = ""
+    answer_contents = ""
+    last_chunk = None
+    is_first_token_appeared = False
+    is_answering = False
+    async for chunk in stream:
+        last_chunk = chunk
+        # print(chunk)
+        if not is_first_token_appeared:
+            logger.info(f"{chunk.id}: 第一个 token 已出现")
+            is_first_token_appeared = True
+        if not chunk.choices:
+            logger.info("Usage:", chunk.usage)
+        else:
+            delta = chunk.choices[0].delta
+            if (
+                hasattr(delta, "reasoning_content")
+                and delta.reasoning_content is not None
+            ):
+                reasoning_contents += delta.reasoning_content
+            else:
+                if not is_answering:
+                    logger.info(
+                        f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
+                    )
+                    is_answering = True
+                if delta.content is not None:
+                    answer_contents += delta.content
+    # print(last_chunk)
+    # 创建新的 ChatCompletion 对象
+    if last_chunk and last_chunk.choices:
+        message = ChatCompletionMessage(
+            content=answer_contents,
+            role="assistant",
+            tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
+        )
+        if reasoning_contents != "":
+            setattr(message, "reasoning_content", reasoning_contents)
+        choice = Choice(
+            finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
+            index=last_chunk.choices[0].index,
+            message=message,
+        )
+        return ChatCompletion(
+            id=last_chunk.id,
+            choices=[choice],
+            created=last_chunk.created,
+            model=last_chunk.model,
+            system_fingerprint=last_chunk.system_fingerprint,
+            object="chat.completion",
+            usage=last_chunk.usage,
+        )
+    else:
+        return ChatCompletion(
+            id="",
+            choices=[],
+            created=0,
+            model="",
+            system_fingerprint="",
+            object="chat.completion",
+            usage=None,
+        )
+
+
+async def process_completion_to_details(completion: ChatCompletion) -> str:
+    usage_text = ""
+    usage = completion.usage
+    if usage is None:
+        usage_text = "无"
+    else:
+        usage_text = str(usage)
+
+    details_text = f"""=========消息详情=========
+模型: {completion.model}
+消息 ID: {completion.id}
+用量信息: {usage_text}"""
+    # print(details_text)
+    return details_text
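Note: a minimal usage sketch of the two new helpers, assuming a plain AsyncOpenAI client and a hypothetical model name and prompt (the plugin itself reaches them through `make_chat_openai` and the handlers above):

```python
from openai import AsyncOpenAI

from nonebot_plugin_marshoai.utils.processor import (
    process_chat_stream,
    process_completion_to_details,
)


async def demo() -> None:
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
    stream = await client.chat.completions.create(
        model="gpt-4o-mini",  # hypothetical model name
        messages=[{"role": "user", "content": "你好"}],
        stream=True,
    )
    # Merge the streamed chunks back into a single ChatCompletion,
    # then render the "detail" text that the Argot segment displays.
    completion = await process_chat_stream(stream)
    print(await process_completion_to_details(completion))
```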
pdm.lock (generated, 32 lines changed)
@@ -5,7 +5,7 @@
 groups = ["default", "dev", "test"]
 strategy = ["inherit_metadata"]
 lock_version = "4.5.0"
-content_hash = "sha256:d7ab3d9ca825de512d4f87ec846f7fddcf3d5796a7c9562e60c8c7d39c058817"
+content_hash = "sha256:6aa043fb1d2d4d384e0d0c698c02a27f22e099828d2973a4baef05c5316f4ee0"

 [[metadata.targets]]
 requires_python = "~=3.10"
@@ -1485,7 +1485,7 @@ files = [

 [[package]]
 name = "nonebot-plugin-alconna"
-version = "0.54.1"
+version = "0.57.0"
 requires_python = ">=3.9"
 summary = "Alconna Adapter for Nonebot"
 groups = ["default"]
@@ -1499,8 +1499,8 @@ dependencies = [
     "tarina<0.7,>=0.6.8",
 ]
 files = [
-    {file = "nonebot_plugin_alconna-0.54.1-py3-none-any.whl", hash = "sha256:4edb4b081cd64ce37717c7a92d31aadd2cf287a5a0adc2ac86ed82d9bcad5048"},
-    {file = "nonebot_plugin_alconna-0.54.1.tar.gz", hash = "sha256:66fae03120b8eff25bb0027d65f149e399aa6f73c7585ebdd388d1904cecdeee"},
+    {file = "nonebot_plugin_alconna-0.57.0-py3-none-any.whl", hash = "sha256:6c4bcce1a9aa176244b4c011b19b1cea00269c4c6794cd4e90d8dd7990ec3ec9"},
+    {file = "nonebot_plugin_alconna-0.57.0.tar.gz", hash = "sha256:7a9a4bf373f3f6836611dbde1a0917b84441a534dd6f2b20dae3ba6fff142858"},
 ]

 [[package]]
@@ -1519,9 +1519,27 @@ files = [
     {file = "nonebot_plugin_apscheduler-0.5.0.tar.gz", hash = "sha256:6c0230e99765f275dc83d6639ff33bd6f71203fa10cd1b8a204b0f95530cda86"},
 ]

+[[package]]
+name = "nonebot-plugin-argot"
+version = "0.1.7"
+requires_python = ">=3.10"
+summary = "NoneBot 暗语"
+groups = ["default"]
+dependencies = [
+    "aiofiles>=24.1.0",
+    "nonebot-plugin-alconna>=0.51.1",
+    "nonebot-plugin-apscheduler>=0.5.0",
+    "nonebot-plugin-localstore>=0.7.4",
+    "nonebot2>=2.3.2",
+]
+files = [
+    {file = "nonebot_plugin_argot-0.1.7-py3-none-any.whl", hash = "sha256:1af939a60967e27aff6f7ce97150d26cba8f1ef0cf216b44372cc0d8e5937204"},
+    {file = "nonebot_plugin_argot-0.1.7.tar.gz", hash = "sha256:f76c2139c9af1e2de6efdc487b728fbad84737d272bf1f600d085bbe6ed79094"},
+]
+
 [[package]]
 name = "nonebot-plugin-localstore"
-version = "0.7.3"
+version = "0.7.4"
 requires_python = "<4.0,>=3.9"
 summary = "Local Storage Support for NoneBot2"
 groups = ["default"]
@@ -1532,8 +1550,8 @@ dependencies = [
     "typing-extensions<5.0.0,>=4.0.0",
 ]
 files = [
-    {file = "nonebot_plugin_localstore-0.7.3-py3-none-any.whl", hash = "sha256:1bc239b4b5320df0dc08eada7c4f8ba4cb92d4dc3134bf4646ab5e297bd7e575"},
-    {file = "nonebot_plugin_localstore-0.7.3.tar.gz", hash = "sha256:1aff10e2dacfc5bc9ce239fd34849f8d7172a118135dbc5aeba1c97605d9959d"},
+    {file = "nonebot_plugin_localstore-0.7.4-py3-none-any.whl", hash = "sha256:3b08030878eadcdd8b9ce3d079da0dc2d0e41dc91f0b2d8cf7fa862a27de9090"},
+    {file = "nonebot_plugin_localstore-0.7.4.tar.gz", hash = "sha256:85ddc13814bfcd484ab311306823651390020bf44f4fb4733b343a58e72723ce"},
 ]

 [[package]]
@@ -10,7 +10,7 @@ authors = [
 ]
 dependencies = [
     "nonebot2>=2.4.0",
-    "nonebot-plugin-alconna>=0.48.0",
+    "nonebot-plugin-alconna>=0.57.1",
     "nonebot-plugin-localstore>=0.7.1",
     "zhDatetime>=2.0.0",
    "aiohttp>=3.9",
@@ -28,7 +28,8 @@ dependencies = [
     "azure-ai-inference>=1.0.0b6",
     "watchdog>=6.0.0",
     "nonebot-plugin-apscheduler>=0.5.0",
-    "openai>=1.58.1"
+    "openai>=1.58.1",
+    "nonebot-plugin-argot>=0.1.7"
 ]
 license = { text = "MIT, Mulan PSL v2" }