From f9f15c1662e3bdc7e687d545cd89cac77b5c540d Mon Sep 17 00:00:00 2001
From: Asankilp
Date: Fri, 4 Apr 2025 16:19:17 +0800
Subject: [PATCH] Fix type issues
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 nonebot_plugin_marshoai/handler.py       |  13 ++-
 nonebot_plugin_marshoai/marsho.py        |   8 +-
 nonebot_plugin_marshoai/utils/request.py | 118 ++++++++++++-----------
 3 files changed, 73 insertions(+), 66 deletions(-)

diff --git a/nonebot_plugin_marshoai/handler.py b/nonebot_plugin_marshoai/handler.py
index 4afc9fc..f04285e 100644
--- a/nonebot_plugin_marshoai/handler.py
+++ b/nonebot_plugin_marshoai/handler.py
@@ -17,7 +17,7 @@ from nonebot.matcher import (
     current_event,
     current_matcher,
 )
-from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
+from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg, get_message_id, get_target
 from openai import AsyncOpenAI, AsyncStream
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
 
@@ -50,8 +50,8 @@ class MarshoHandler:
         self.event: Event = current_event.get()
         # self.state: T_State = current_handler.get().state
         self.matcher: Matcher = current_matcher.get()
-        self.message_id: str = UniMessage.get_message_id(self.event)
-        self.target = UniMessage.get_target(self.event)
+        self.message_id: str = get_message_id(self.event)
+        self.target = get_target(self.event)
 
     async def process_user_input(
         self, user_input: UniMsg, model_name: str
@@ -117,7 +117,7 @@
 
     async def handle_function_call(
         self,
-        completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
+        completion: Union[ChatCompletion],
         user_message: Union[str, list],
         model_name: str,
         tools_list: list | None = None,
@@ -259,7 +259,7 @@
         model_name: str,
         tools_list: list | None = None,
         tools_message: Optional[list] = None,
-    ) -> Union[ChatCompletion, None]:
+    ) -> ChatCompletion:
         """
         处理流式请求
         """
@@ -274,5 +274,4 @@
         if isinstance(response, AsyncStream):
             return await process_chat_stream(response)
         else:
-            logger.error("Unexpected response type for stream request")
-            return None
+            raise TypeError("Unexpected response type for stream request")
diff --git a/nonebot_plugin_marshoai/marsho.py b/nonebot_plugin_marshoai/marsho.py
index 6219bdc..8ef3ffd 100644
--- a/nonebot_plugin_marshoai/marsho.py
+++ b/nonebot_plugin_marshoai/marsho.py
@@ -299,14 +299,16 @@ with contextlib.suppress(ImportError):
         # 优化先不做()
         if config.marshoai_poke_suffix != "":
             logger.info(f"收到戳一戳,用户昵称:{user_nickname}")
-            response = await make_chat_openai(
+            pre_response = await make_chat_openai(
                 client=client,
                 model_name=model_name,
                 msg=usermsg,
                 stream=config.marshoai_stream,
             )
-            if isinstance(response, AsyncStream):
-                response = await process_chat_stream(response)
+            if isinstance(pre_response, AsyncStream):
+                response = await process_chat_stream(pre_response)
+            else:
+                response = pre_response
             choice = response.choices[0]  # type: ignore
             if choice.finish_reason == CompletionsFinishReason.STOPPED:
                 content = extract_content_and_think(choice.message)[0]
diff --git a/nonebot_plugin_marshoai/utils/request.py b/nonebot_plugin_marshoai/utils/request.py
index 542ca26..ae83d11 100644
--- a/nonebot_plugin_marshoai/utils/request.py
+++ b/nonebot_plugin_marshoai/utils/request.py
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from nonebot.log import logger
 from openai import AsyncStream
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
@@ -8,58 +6,66 @@ from openai.types.chat.chat_completion import Choice
 
 async def process_chat_stream(
     stream: AsyncStream[ChatCompletionChunk],
-) -> Optional[ChatCompletion]:
-    if isinstance(stream, AsyncStream):
-        reasoning_contents = ""
-        answer_contents = ""
-        last_chunk = None
-        is_first_token_appeared = False
-        is_answering = False
-        async for chunk in stream:
-            last_chunk = chunk
-            # print(chunk)
-            if not is_first_token_appeared:
-                logger.debug(f"{chunk.id}: 第一个 token 已出现")
-                is_first_token_appeared = True
-            if not chunk.choices:
-                logger.info("Usage:", chunk.usage)
+) -> ChatCompletion:
+    reasoning_contents = ""
+    answer_contents = ""
+    last_chunk = None
+    is_first_token_appeared = False
+    is_answering = False
+    async for chunk in stream:
+        last_chunk = chunk
+        # print(chunk)
+        if not is_first_token_appeared:
+            logger.debug(f"{chunk.id}: 第一个 token 已出现")
+            is_first_token_appeared = True
+        if not chunk.choices:
+            logger.info("Usage:", chunk.usage)
+        else:
+            delta = chunk.choices[0].delta
+            if (
+                hasattr(delta, "reasoning_content")
+                and delta.reasoning_content is not None
+            ):
+                reasoning_contents += delta.reasoning_content
             else:
-                delta = chunk.choices[0].delta
-                if (
-                    hasattr(delta, "reasoning_content")
-                    and delta.reasoning_content is not None
-                ):
-                    reasoning_contents += delta.reasoning_content
-                else:
-                    if not is_answering:
-                        logger.debug(
-                            f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
-                        )
-                        is_answering = True
-                    if delta.content is not None:
-                        answer_contents += delta.content
-        # print(last_chunk)
-        # 创建新的 ChatCompletion 对象
-        if last_chunk and last_chunk.choices:
-            message = ChatCompletionMessage(
-                content=answer_contents,
-                role="assistant",
-                tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
-            )
-            if reasoning_contents != "":
-                setattr(message, "reasoning_content", reasoning_contents)
-            choice = Choice(
-                finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
-                index=last_chunk.choices[0].index,
-                message=message,
-            )
-            return ChatCompletion(
-                id=last_chunk.id,
-                choices=[choice],
-                created=last_chunk.created,
-                model=last_chunk.model,
-                system_fingerprint=last_chunk.system_fingerprint,
-                object="chat.completion",
-                usage=last_chunk.usage,
-            )
-    return None
+            if not is_answering:
+                logger.debug(
+                    f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
+                )
+                is_answering = True
+            if delta.content is not None:
+                answer_contents += delta.content
+    # print(last_chunk)
+    # 创建新的 ChatCompletion 对象
+    if last_chunk and last_chunk.choices:
+        message = ChatCompletionMessage(
+            content=answer_contents,
+            role="assistant",
+            tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
+        )
+        if reasoning_contents != "":
+            setattr(message, "reasoning_content", reasoning_contents)
+        choice = Choice(
+            finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
+            index=last_chunk.choices[0].index,
+            message=message,
+        )
+        return ChatCompletion(
+            id=last_chunk.id,
+            choices=[choice],
+            created=last_chunk.created,
+            model=last_chunk.model,
+            system_fingerprint=last_chunk.system_fingerprint,
+            object="chat.completion",
+            usage=last_chunk.usage,
+        )
+    else:
+        return ChatCompletion(
+            id="",
+            choices=[],
+            created=0,
+            model="",
+            system_fingerprint="",
+            object="chat.completion",
+            usage=None,
+        )
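
Note (outside the patch, for review context): the sketch below shows how the
narrowed contract is meant to be consumed — process_chat_stream now always
returns a ChatCompletion, and the stream/non-stream branching mirrors the
patched marsho.py. The AsyncOpenAI client, model name, and message here are
placeholders, not code from this repository.

# Illustrative sketch, not part of the patch; placeholder model/message values.
from openai import AsyncOpenAI, AsyncStream
from openai.types.chat import ChatCompletion

from nonebot_plugin_marshoai.utils.request import process_chat_stream


async def fetch_completion(client: AsyncOpenAI, stream: bool) -> ChatCompletion:
    pre_response = await client.chat.completions.create(
        model="gpt-4o",  # placeholder
        messages=[{"role": "user", "content": "hello"}],
        stream=stream,
    )
    # Same shape as the patched marsho.py: collapse a stream into a single
    # ChatCompletion; pass a non-stream response through unchanged.
    if isinstance(pre_response, AsyncStream):
        return await process_chat_stream(pre_response)
    return pre_response

Because both branches now yield a concrete ChatCompletion, handler.py can drop
its Optional return type and raise TypeError on an unexpected response instead
of returning None.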