初步支持&utils重构

This commit is contained in:
2025-04-04 15:13:04 +08:00
parent c9d2ef7885
commit 3a1a0c39fd
6 changed files with 100 additions and 75 deletions

View File

@ -36,6 +36,7 @@ from .util import (
make_chat_openai, make_chat_openai,
parse_richtext, parse_richtext,
) )
from .utils.request import process_chat_stream
class MarshoHandler: class MarshoHandler:
@ -120,7 +121,7 @@ class MarshoHandler:
completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]], completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
user_message: Union[str, list], user_message: Union[str, list],
model_name: str, model_name: str,
tools_list: list, tools_list: list | None = None,
): ):
# function call # function call
# 需要获取额外信息,调用函数工具 # 需要获取额外信息,调用函数工具
@ -188,7 +189,7 @@ class MarshoHandler:
self, self,
user_message: Union[str, list], user_message: Union[str, list],
model_name: str, model_name: str,
tools_list: list, tools_list: list | None = None,
stream: bool = False, stream: bool = False,
tool_message: Optional[list] = None, tool_message: Optional[list] = None,
) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]: ) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]:
@ -257,13 +258,13 @@ class MarshoHandler:
self, self,
user_message: Union[str, list], user_message: Union[str, list],
model_name: str, model_name: str,
tools_list: list, tools_list: list | None = None,
tools_message: Optional[list] = None, tools_message: Optional[list] = None,
) -> Union[ChatCompletion, None]: ) -> Union[ChatCompletion, None]:
""" """
处理流式请求 处理流式请求
""" """
response = await self.handle_single_chat( response: AsyncStream[ChatCompletionChunk] = await self.handle_single_chat(
user_message=user_message, user_message=user_message,
model_name=model_name, model_name=model_name,
tools_list=None, # TODO:让流式调用支持工具调用 tools_list=None, # TODO:让流式调用支持工具调用
@ -271,57 +272,4 @@ class MarshoHandler:
stream=True, stream=True,
) )
if isinstance(response, AsyncStream): return await process_chat_stream(response)
reasoning_contents = ""
answer_contents = ""
last_chunk = None
is_first_token_appeared = False
is_answering = False
async for chunk in response:
last_chunk = chunk
# print(chunk)
if not is_first_token_appeared:
logger.debug(f"{chunk.id}: 第一个 token 已出现")
is_first_token_appeared = True
if not chunk.choices:
logger.info("Usage:", chunk.usage)
else:
delta = chunk.choices[0].delta
if (
hasattr(delta, "reasoning_content")
and delta.reasoning_content is not None
):
reasoning_contents += delta.reasoning_content
else:
if not is_answering:
logger.debug(
f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
)
is_answering = True
if delta.content is not None:
answer_contents += delta.content
# print(last_chunk)
# 创建新的 ChatCompletion 对象
if last_chunk and last_chunk.choices:
message = ChatCompletionMessage(
content=answer_contents,
role="assistant",
tool_calls=last_chunk.choices[0].delta.tool_calls, # type: ignore
)
if reasoning_contents != "":
setattr(message, "reasoning_content", reasoning_contents)
choice = Choice(
finish_reason=last_chunk.choices[0].finish_reason, # type: ignore
index=last_chunk.choices[0].index,
message=message,
)
return ChatCompletion(
id=last_chunk.id,
choices=[choice],
created=last_chunk.created,
model=last_chunk.model,
system_fingerprint=last_chunk.system_fingerprint,
object="chat.completion",
usage=last_chunk.usage,
)
return None

View File

@ -15,7 +15,14 @@ from nonebot.params import CommandArg
from nonebot.permission import SUPERUSER from nonebot.permission import SUPERUSER
from nonebot.rule import to_me from nonebot.rule import to_me
from nonebot.typing import T_State from nonebot.typing import T_State
from nonebot_plugin_alconna import MsgTarget, UniMessage, UniMsg, on_alconna from nonebot_plugin_alconna import (
Emoji,
MsgTarget,
UniMessage,
UniMsg,
message_reaction,
on_alconna,
)
from .config import config from .config import config
from .constants import INTRODUCTION, SUPPORT_IMAGE_MODELS from .constants import INTRODUCTION, SUPPORT_IMAGE_MODELS
@ -23,6 +30,7 @@ from .handler import MarshoHandler
from .hooks import * from .hooks import *
from .instances import client, context, model_name, target_list, tools from .instances import client, context, model_name, target_list, tools
from .metadata import metadata from .metadata import metadata
from .models import MarshoContext
from .plugin.func_call.caller import get_function_calls from .plugin.func_call.caller import get_function_calls
from .util import * from .util import *
@ -226,6 +234,7 @@ async def marsho(
if not text: if not text:
# 发送说明 # 发送说明
# await UniMessage(metadata.usage + "\n当前使用的模型" + model_name).send() # await UniMessage(metadata.usage + "\n当前使用的模型" + model_name).send()
await message_reaction(Emoji("38"))
await marsho_cmd.finish(INTRODUCTION) await marsho_cmd.finish(INTRODUCTION)
backup_context = await get_backup_context(target.id, target.private) backup_context = await get_backup_context(target.id, target.private)
if backup_context: if backup_context:
@ -256,6 +265,7 @@ async def marsho(
map(lambda v: v.data(), get_function_calls().values()) map(lambda v: v.data(), get_function_calls().values())
) )
logger.info(f"正在获取回答,模型:{model_name}") logger.info(f"正在获取回答,模型:{model_name}")
await message_reaction(Emoji("66"))
# logger.info(f"上下文:{context_msg}") # logger.info(f"上下文:{context_msg}")
response = await handler.handle_common_chat( response = await handler.handle_common_chat(
usermsg, model_name, tools_lists, config.marshoai_stream usermsg, model_name, tools_lists, config.marshoai_stream
@ -282,19 +292,21 @@ with contextlib.suppress(ImportError): # 优化先不做()
async def poke(event: Event): async def poke(event: Event):
user_nickname = await get_nickname_by_user_id(event.get_user_id()) user_nickname = await get_nickname_by_user_id(event.get_user_id())
usermsg = await get_prompt(model_name) + [
UserMessage(content=f"*{user_nickname}{config.marshoai_poke_suffix}"),
]
try: try:
if config.marshoai_poke_suffix != "": if config.marshoai_poke_suffix != "":
logger.info(f"收到戳一戳,用户昵称:{user_nickname}") logger.info(f"收到戳一戳,用户昵称:{user_nickname}")
response = await make_chat_openai( if config.marshoai_stream:
client=client, handler = MarshoHandler(client, MarshoContext())
model_name=model_name, response = await handler.handle_stream_request(usermsg, model_name)
msg=await get_prompt(model_name) else:
+ [ response = await make_chat_openai(
UserMessage( client=client,
content=f"*{user_nickname}{config.marshoai_poke_suffix}" model_name=model_name,
), msg=usermsg,
], )
)
choice = response.choices[0] # type: ignore choice = response.choices[0] # type: ignore
if choice.finish_reason == CompletionsFinishReason.STOPPED: if choice.finish_reason == CompletionsFinishReason.STOPPED:
content = extract_content_and_think(choice.message)[0] content = extract_content_and_think(choice.message)[0]

View File

@ -18,7 +18,7 @@ from nonebot_plugin_alconna import Text as TextMsg
from nonebot_plugin_alconna import UniMessage from nonebot_plugin_alconna import UniMessage
from openai import AsyncOpenAI, AsyncStream, NotGiven from openai import AsyncOpenAI, AsyncStream, NotGiven
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
from zhDateTime import DateTime from zhDateTime import DateTime # type: ignore
from ._types import DeveloperMessage from ._types import DeveloperMessage
from .cache.decos import * from .cache.decos import *

View File

@ -0,0 +1,65 @@
from typing import Optional
from nonebot.log import logger
from openai import AsyncStream
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
from openai.types.chat.chat_completion import Choice
async def process_chat_stream(
    stream: AsyncStream[ChatCompletionChunk],
) -> Optional[ChatCompletion]:
    """
    Consume an OpenAI chat-completion stream and fold it into a single
    ``ChatCompletion`` object.

    Two buffers are accumulated while draining the stream: ``reasoning_content``
    deltas (chain-of-thought output emitted by reasoning models) and plain
    ``content`` deltas (the final answer). The last chunk seen supplies the
    metadata (id, model, usage, finish_reason, ...) for the rebuilt object.

    Args:
        stream: The async stream of ``ChatCompletionChunk`` objects.

    Returns:
        A reconstructed ``ChatCompletion``, or ``None`` when the argument is
        not an ``AsyncStream`` or no chunk carrying choices was received.
    """
    if not isinstance(stream, AsyncStream):
        # Defensive guard: a caller may hand us a non-streaming response.
        return None

    reasoning_contents = ""
    answer_contents = ""
    last_chunk = None
    is_first_token_appeared = False
    is_answering = False

    async for chunk in stream:
        last_chunk = chunk
        if not is_first_token_appeared:
            logger.debug(f"{chunk.id}: 第一个 token 已出现")
            is_first_token_appeared = True
        if not chunk.choices:
            # A choiceless chunk typically carries only usage statistics.
            # Fix: the usage value was previously passed as a stray positional
            # argument, which loguru's format-style logging silently dropped.
            logger.info(f"Usage: {chunk.usage}")
            continue
        delta = chunk.choices[0].delta
        if getattr(delta, "reasoning_content", None) is not None:
            reasoning_contents += delta.reasoning_content
        else:
            if not is_answering:
                logger.debug(
                    f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
                )
                is_answering = True
            if delta.content is not None:
                answer_contents += delta.content

    # Rebuild a non-streaming ChatCompletion from the accumulated deltas.
    if last_chunk and last_chunk.choices:
        message = ChatCompletionMessage(
            content=answer_contents,
            role="assistant",
            tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
        )
        if reasoning_contents != "":
            # ChatCompletionMessage declares no reasoning_content field, so it
            # is attached dynamically for downstream consumers to read.
            setattr(message, "reasoning_content", reasoning_contents)
        choice = Choice(
            finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
            index=last_chunk.choices[0].index,
            message=message,
        )
        return ChatCompletion(
            id=last_chunk.id,
            choices=[choice],
            created=last_chunk.created,
            model=last_chunk.model,
            system_fingerprint=last_chunk.system_fingerprint,
            object="chat.completion",
            usage=last_chunk.usage,
        )
    return None

8
pdm.lock generated
View File

@ -5,7 +5,7 @@
groups = ["default", "dev", "test"] groups = ["default", "dev", "test"]
strategy = ["inherit_metadata"] strategy = ["inherit_metadata"]
lock_version = "4.5.0" lock_version = "4.5.0"
content_hash = "sha256:d7ab3d9ca825de512d4f87ec846f7fddcf3d5796a7c9562e60c8c7d39c058817" content_hash = "sha256:9dd3edfe69c332deac360af2685358e82c5dac0870900668534fc6f1d34040f8"
[[metadata.targets]] [[metadata.targets]]
requires_python = "~=3.10" requires_python = "~=3.10"
@ -1485,7 +1485,7 @@ files = [
[[package]] [[package]]
name = "nonebot-plugin-alconna" name = "nonebot-plugin-alconna"
version = "0.54.1" version = "0.57.0"
requires_python = ">=3.9" requires_python = ">=3.9"
summary = "Alconna Adapter for Nonebot" summary = "Alconna Adapter for Nonebot"
groups = ["default"] groups = ["default"]
@ -1499,8 +1499,8 @@ dependencies = [
"tarina<0.7,>=0.6.8", "tarina<0.7,>=0.6.8",
] ]
files = [ files = [
{file = "nonebot_plugin_alconna-0.54.1-py3-none-any.whl", hash = "sha256:4edb4b081cd64ce37717c7a92d31aadd2cf287a5a0adc2ac86ed82d9bcad5048"}, {file = "nonebot_plugin_alconna-0.57.0-py3-none-any.whl", hash = "sha256:6c4bcce1a9aa176244b4c011b19b1cea00269c4c6794cd4e90d8dd7990ec3ec9"},
{file = "nonebot_plugin_alconna-0.54.1.tar.gz", hash = "sha256:66fae03120b8eff25bb0027d65f149e399aa6f73c7585ebdd388d1904cecdeee"}, {file = "nonebot_plugin_alconna-0.57.0.tar.gz", hash = "sha256:7a9a4bf373f3f6836611dbde1a0917b84441a534dd6f2b20dae3ba6fff142858"},
] ]
[[package]] [[package]]

View File

@ -10,7 +10,7 @@ authors = [
] ]
dependencies = [ dependencies = [
"nonebot2>=2.4.0", "nonebot2>=2.4.0",
"nonebot-plugin-alconna>=0.48.0", "nonebot-plugin-alconna>=0.57.0",
"nonebot-plugin-localstore>=0.7.1", "nonebot-plugin-localstore>=0.7.1",
"zhDatetime>=2.0.0", "zhDatetime>=2.0.0",
"aiohttp>=3.9", "aiohttp>=3.9",