Mirror of https://github.com/LiteyukiStudio/nonebot-plugin-marshoai.git, synced 2025-06-04 20:55:21 +00:00
Fix type issues (解决类型问题)
This commit is contained in: parent b21ff56f43, commit f9f15c1662
@@ -17,7 +17,7 @@ from nonebot.matcher import (
     current_event,
     current_matcher,
 )
-from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
+from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg, get_message_id, get_target
 from openai import AsyncOpenAI, AsyncStream
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
 
@@ -50,8 +50,8 @@ class MarshoHandler:
         self.event: Event = current_event.get()
         # self.state: T_State = current_handler.get().state
         self.matcher: Matcher = current_matcher.get()
-        self.message_id: str = UniMessage.get_message_id(self.event)
-        self.target = UniMessage.get_target(self.event)
+        self.message_id: str = get_message_id(self.event)
+        self.target = get_target(self.event)
 
     async def process_user_input(
         self, user_input: UniMsg, model_name: str
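
Together with the import hunk above, this replaces the `UniMessage.get_message_id` / `UniMessage.get_target` classmethod-style calls with the standalone helpers exported by `nonebot_plugin_alconna.uniseg`, which is what resolves the type-checker complaint. A minimal sketch of the new call pattern (assumes it runs inside an active matcher session, otherwise `current_event.get()` raises LookupError):

    # Sketch only: standalone uniseg helpers instead of UniMessage classmethods.
    from nonebot.matcher import current_event
    from nonebot_plugin_alconna.uniseg import get_message_id, get_target

    event = current_event.get()          # Event of the message being handled
    message_id = get_message_id(event)   # str id of the incoming message
    target = get_target(event)           # Target describing where replies go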
@@ -117,7 +117,7 @@
 
     async def handle_function_call(
         self,
-        completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
+        completion: Union[ChatCompletion],
         user_message: Union[str, list],
         model_name: str,
         tools_list: list | None = None,
@@ -259,7 +259,7 @@
         model_name: str,
         tools_list: list | None = None,
         tools_message: Optional[list] = None,
-    ) -> Union[ChatCompletion, None]:
+    ) -> ChatCompletion:
         """
         处理流式请求
         """
@@ -274,5 +274,4 @@
         if isinstance(response, AsyncStream):
             return await process_chat_stream(response)
         else:
-            logger.error("Unexpected response type for stream request")
-            return None
+            raise TypeError("Unexpected response type for stream request")
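
These two hunks narrow the streaming method's contract, matching the earlier `handle_function_call` change: it now always returns a `ChatCompletion` (its docstring reads 处理流式请求, i.e. "handle a streaming request"), and the unreachable branch raises `TypeError` instead of logging and returning `None`. A hedged sketch of the resulting fail-fast shape, with a hypothetical wrapper name (`process_chat_stream` is the project's own helper):

    # Hypothetical standalone version of the fail-fast pattern above.
    from openai import AsyncStream
    from openai.types.chat import ChatCompletion

    async def resolve_stream(response) -> ChatCompletion:
        if isinstance(response, AsyncStream):
            return await process_chat_stream(response)  # fold chunks into one completion
        # No more `return None`: callers no longer need an Optional check.
        raise TypeError("Unexpected response type for stream request")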
@@ -299,14 +299,16 @@ with contextlib.suppress(ImportError):  # 优化先不做()
         if config.marshoai_poke_suffix != "":
             logger.info(f"收到戳一戳,用户昵称:{user_nickname}")
 
-        response = await make_chat_openai(
+        pre_response = await make_chat_openai(
             client=client,
             model_name=model_name,
             msg=usermsg,
             stream=config.marshoai_stream,
         )
-        if isinstance(response, AsyncStream):
-            response = await process_chat_stream(response)
+        if isinstance(pre_response, AsyncStream):
+            response = await process_chat_stream(pre_response)
+        else:
+            response = pre_response
         choice = response.choices[0]  # type: ignore
         if choice.finish_reason == CompletionsFinishReason.STOPPED:
             content = extract_content_and_think(choice.message)[0]
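
Renaming the raw result to `pre_response` makes the post-branch variable monomorphic: whichever path runs, `response` is a plain `ChatCompletion` by the time `response.choices[0]` is read. The normalization pattern in isolation, a minimal sketch reusing the names from the hunk:

    # pre_response may be a ChatCompletion or an AsyncStream, depending on
    # config.marshoai_stream; normalize before touching .choices.
    if isinstance(pre_response, AsyncStream):
        response = await process_chat_stream(pre_response)  # aggregate the stream
    else:
        response = pre_response  # already a complete ChatCompletion
    choice = response.choices[0]  # safe: response is always ChatCompletion here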
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from nonebot.log import logger
 from openai import AsyncStream
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
@@ -8,58 +6,66 @@ from openai.types.chat.chat_completion import Choice
 
 async def process_chat_stream(
     stream: AsyncStream[ChatCompletionChunk],
-) -> Optional[ChatCompletion]:
-    if isinstance(stream, AsyncStream):
-        reasoning_contents = ""
-        answer_contents = ""
-        last_chunk = None
-        is_first_token_appeared = False
-        is_answering = False
-        async for chunk in stream:
-            last_chunk = chunk
-            # print(chunk)
-            if not is_first_token_appeared:
-                logger.debug(f"{chunk.id}: 第一个 token 已出现")
-                is_first_token_appeared = True
-            if not chunk.choices:
-                logger.info("Usage:", chunk.usage)
-            else:
-                delta = chunk.choices[0].delta
-                if (
-                    hasattr(delta, "reasoning_content")
-                    and delta.reasoning_content is not None
-                ):
-                    reasoning_contents += delta.reasoning_content
-                else:
-                    if not is_answering:
-                        logger.debug(
-                            f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
-                        )
-                        is_answering = True
-                    if delta.content is not None:
-                        answer_contents += delta.content
-        # print(last_chunk)
-        # 创建新的 ChatCompletion 对象
-        if last_chunk and last_chunk.choices:
-            message = ChatCompletionMessage(
-                content=answer_contents,
-                role="assistant",
-                tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
-            )
-            if reasoning_contents != "":
-                setattr(message, "reasoning_content", reasoning_contents)
-            choice = Choice(
-                finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
-                index=last_chunk.choices[0].index,
-                message=message,
-            )
-            return ChatCompletion(
-                id=last_chunk.id,
-                choices=[choice],
-                created=last_chunk.created,
-                model=last_chunk.model,
-                system_fingerprint=last_chunk.system_fingerprint,
-                object="chat.completion",
-                usage=last_chunk.usage,
-            )
-        return None
+) -> ChatCompletion:
+    reasoning_contents = ""
+    answer_contents = ""
+    last_chunk = None
+    is_first_token_appeared = False
+    is_answering = False
+    async for chunk in stream:
+        last_chunk = chunk
+        # print(chunk)
+        if not is_first_token_appeared:
+            logger.debug(f"{chunk.id}: 第一个 token 已出现")
+            is_first_token_appeared = True
+        if not chunk.choices:
+            logger.info("Usage:", chunk.usage)
+        else:
+            delta = chunk.choices[0].delta
+            if (
+                hasattr(delta, "reasoning_content")
+                and delta.reasoning_content is not None
+            ):
+                reasoning_contents += delta.reasoning_content
+            else:
+                if not is_answering:
+                    logger.debug(
+                        f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
+                    )
+                    is_answering = True
+                if delta.content is not None:
+                    answer_contents += delta.content
+    # print(last_chunk)
+    # 创建新的 ChatCompletion 对象
+    if last_chunk and last_chunk.choices:
+        message = ChatCompletionMessage(
+            content=answer_contents,
+            role="assistant",
+            tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
+        )
+        if reasoning_contents != "":
+            setattr(message, "reasoning_content", reasoning_contents)
+        choice = Choice(
+            finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
+            index=last_chunk.choices[0].index,
+            message=message,
+        )
+        return ChatCompletion(
+            id=last_chunk.id,
+            choices=[choice],
+            created=last_chunk.created,
+            model=last_chunk.model,
+            system_fingerprint=last_chunk.system_fingerprint,
+            object="chat.completion",
+            usage=last_chunk.usage,
+        )
+    else:
+        return ChatCompletion(
+            id="",
+            choices=[],
+            created=0,
+            model="",
+            system_fingerprint="",
+            object="chat.completion",
+            usage=None,
+        )
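
With the `isinstance` guard and the trailing `return None` removed, `process_chat_stream` now always yields a `ChatCompletion`: the empty-stream case returns a placeholder object (empty id, no choices) rather than `None`, which is what lets the return type drop `Optional`. A condensed sketch of the folding logic, under the assumption that chunks follow the OpenAI SDK shape (`choices[0].delta`):

    # Condensed sketch of the aggregation process_chat_stream performs.
    answer = ""
    last = None
    async for chunk in stream:  # stream: AsyncStream[ChatCompletionChunk]
        last = chunk
        if chunk.choices and chunk.choices[0].delta.content is not None:
            answer += chunk.choices[0].delta.content  # accumulate answer text
    # `last` supplies id/model/created/usage for the synthesized ChatCompletion;
    # with no chunks at all, the new code falls through to the placeholder branch.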