Mirror of https://github.com/LiteyukiStudio/nonebot-plugin-marshoai.git (synced 2025-06-06 14:05:23 +00:00)
Fix type issues (解决类型问题)
Commit f9f15c1662 · parent b21ff56f43
@@ -17,7 +17,7 @@ from nonebot.matcher import (
     current_event,
     current_matcher,
 )
-from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
+from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg, get_message_id, get_target
 from openai import AsyncOpenAI, AsyncStream
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
 
@@ -50,8 +50,8 @@ class MarshoHandler:
         self.event: Event = current_event.get()
         # self.state: T_State = current_handler.get().state
         self.matcher: Matcher = current_matcher.get()
-        self.message_id: str = UniMessage.get_message_id(self.event)
-        self.target = UniMessage.get_target(self.event)
+        self.message_id: str = get_message_id(self.event)
+        self.target = get_target(self.event)
 
     async def process_user_input(
         self, user_input: UniMsg, model_name: str
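The first two hunks swap the `UniMessage` classmethod-style calls for the standalone `get_message_id` / `get_target` helpers that `nonebot_plugin_alconna.uniseg` also exports, matching the new import line. A minimal sketch of the resulting wiring; the `HandlerContext` class name is illustrative and not part of the plugin, while the attribute assignments mirror the hunk above:

```python
# Sketch: resolve message id and reply target with the module-level helpers
# from nonebot_plugin_alconna.uniseg, as the updated import allows.
from nonebot.adapters import Event
from nonebot.matcher import Matcher, current_event, current_matcher
from nonebot_plugin_alconna.uniseg import get_message_id, get_target


class HandlerContext:
    """Illustrative stand-in for MarshoHandler's __init__ wiring."""

    def __init__(self) -> None:
        # Context vars are only populated inside a running handler.
        self.event: Event = current_event.get()
        self.matcher: Matcher = current_matcher.get()
        # Module-level helpers replace the former UniMessage.get_message_id /
        # UniMessage.get_target calls; they accept the bare Event as shown in the diff.
        self.message_id: str = get_message_id(self.event)
        self.target = get_target(self.event)
```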
@@ -117,7 +117,7 @@ class MarshoHandler:
 
     async def handle_function_call(
         self,
-        completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
+        completion: Union[ChatCompletion],
         user_message: Union[str, list],
         model_name: str,
         tools_list: list | None = None,
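Since `Union[ChatCompletion]` with a single member is just `ChatCompletion`, the signature now promises a fully resolved completion; any stream has to be folded into one before this method is called. A small sketch of what that buys on the consumer side, with a hypothetical helper name (the tool-call access path is the regular OpenAI SDK shape, not plugin code):

```python
# Sketch: with the parameter narrowed to ChatCompletion, tool-call access
# type-checks without isinstance guards against AsyncStream.
from openai.types.chat import ChatCompletion


def extract_tool_calls(completion: ChatCompletion):
    # A ChatCompletion always carries fully formed choices, unlike a chunk
    # stream, so the message and its tool_calls can be read directly.
    message = completion.choices[0].message
    return message.tool_calls or []
```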
@@ -259,7 +259,7 @@ class MarshoHandler:
         model_name: str,
         tools_list: list | None = None,
         tools_message: Optional[list] = None,
-    ) -> Union[ChatCompletion, None]:
+    ) -> ChatCompletion:
         """
         处理流式请求
         """
@@ -274,5 +274,4 @@ class MarshoHandler:
         if isinstance(response, AsyncStream):
             return await process_chat_stream(response)
         else:
-            logger.error("Unexpected response type for stream request")
-        return None
+            raise TypeError("Unexpected response type for stream request")
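Instead of logging and falling through to `return None`, the unexpected-type branch now raises, which is what allows the `Union[ChatCompletion, None]` annotation in the previous hunk to become plain `ChatCompletion`. The same guard pattern in isolation; `ensure_completion` and `fold_stream` are hypothetical names standing in for the method and `process_chat_stream`:

```python
# Sketch: fail loudly on unexpected response types so the return type stays total.
from typing import Awaitable, Callable, Union

from openai import AsyncStream
from openai.types.chat import ChatCompletion, ChatCompletionChunk


async def ensure_completion(
    response: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
    fold_stream: Callable[[AsyncStream[ChatCompletionChunk]], Awaitable[ChatCompletion]],
) -> ChatCompletion:
    if isinstance(response, AsyncStream):
        # Fold the chunk stream into a single ChatCompletion.
        return await fold_stream(response)
    if isinstance(response, ChatCompletion):
        return response
    # Anything else is a programming error; raise instead of returning None.
    raise TypeError("Unexpected response type for stream request")
```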
@@ -299,14 +299,16 @@ with contextlib.suppress(ImportError):  # 优化先不做()
         if config.marshoai_poke_suffix != "":
             logger.info(f"收到戳一戳,用户昵称:{user_nickname}")
 
-            response = await make_chat_openai(
+            pre_response = await make_chat_openai(
                 client=client,
                 model_name=model_name,
                 msg=usermsg,
                 stream=config.marshoai_stream,
             )
-            if isinstance(response, AsyncStream):
-                response = await process_chat_stream(response)
+            if isinstance(pre_response, AsyncStream):
+                response = await process_chat_stream(pre_response)
+            else:
+                response = pre_response
             choice = response.choices[0]  # type: ignore
             if choice.finish_reason == CompletionsFinishReason.STOPPED:
                 content = extract_content_and_think(choice.message)[0]
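The poke handler gets the same treatment via a rename: the union-typed value keeps its own name (`pre_response`) and `response` only ever holds a `ChatCompletion` by the time `.choices[0]` is read. A sketch of the pattern with hypothetical names (`resolve_poke_reply`, `fold_stream`) standing in for the handler body and `process_chat_stream`:

```python
# Sketch: split the union-typed value from the resolved completion so the
# tail of the handler only deals with the non-stream shape.
from typing import Awaitable, Callable, Union

from openai import AsyncStream
from openai.types.chat import ChatCompletion, ChatCompletionChunk


async def resolve_poke_reply(
    pre_response: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
    fold_stream: Callable[[AsyncStream[ChatCompletionChunk]], Awaitable[ChatCompletion]],
) -> str:
    if isinstance(pre_response, AsyncStream):
        response = await fold_stream(pre_response)
    else:
        response = pre_response
    # Only a ChatCompletion reaches this point.
    choice = response.choices[0]
    if choice.finish_reason == "stop":  # the plugin compares against CompletionsFinishReason.STOPPED
        return choice.message.content or ""
    return ""
```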
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from nonebot.log import logger
 from openai import AsyncStream
 from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
@@ -8,8 +6,7 @@ from openai.types.chat.chat_completion import Choice
 
 async def process_chat_stream(
     stream: AsyncStream[ChatCompletionChunk],
-) -> Optional[ChatCompletion]:
-    if isinstance(stream, AsyncStream):
+) -> ChatCompletion:
     reasoning_contents = ""
     answer_contents = ""
     last_chunk = None
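With the `Optional` return and the redundant `isinstance` guard gone, the body of `process_chat_stream` runs unconditionally over the stream. A minimal sketch of what that folding involves, assuming only the OpenAI SDK's async iteration over chunks; the plugin additionally accumulates reasoning content and rebuilds a full `ChatCompletion` (see the next hunk):

```python
# Sketch: fold a chunk stream into accumulated answer text.
from openai import AsyncStream
from openai.types.chat import ChatCompletionChunk


async def collect_stream_text(stream: AsyncStream[ChatCompletionChunk]) -> str:
    answer_contents = ""
    async for chunk in stream:
        # Each chunk carries an incremental delta; concatenate the text pieces.
        if chunk.choices and chunk.choices[0].delta.content:
            answer_contents += chunk.choices[0].delta.content
    return answer_contents
```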
@@ -62,4 +59,13 @@ async def process_chat_stream(
             object="chat.completion",
             usage=last_chunk.usage,
         )
-    return None
+    else:
+        return ChatCompletion(
+            id="",
+            choices=[],
+            created=0,
+            model="",
+            system_fingerprint="",
+            object="chat.completion",
+            usage=None,
+        )
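The tail of `process_chat_stream` now always returns a `ChatCompletion`: when chunks arrived, one is rebuilt from the accumulated state; otherwise an empty placeholder is returned instead of `None`, keeping the function total. A sketch of that shape; the empty fallback's field values come from the hunk, while the non-empty branch's `Choice`/message wiring and the `rebuild_completion` name are assumptions for illustration:

```python
# Sketch: rebuild a ChatCompletion from accumulated stream state, or return an
# empty-but-valid completion when no chunks were received.
from typing import Optional

from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
from openai.types.chat.chat_completion import Choice


def rebuild_completion(answer: str, last_chunk: Optional[ChatCompletionChunk]) -> ChatCompletion:
    if last_chunk is not None:
        return ChatCompletion(
            id=last_chunk.id,
            choices=[
                Choice(
                    finish_reason="stop",
                    index=0,
                    message=ChatCompletionMessage(content=answer, role="assistant"),
                )
            ],
            created=last_chunk.created,
            model=last_chunk.model,
            system_fingerprint=last_chunk.system_fingerprint,
            object="chat.completion",
            usage=last_chunk.usage,
        )
    # No chunks at all: return an empty placeholder instead of None.
    return ChatCompletion(
        id="",
        choices=[],
        created=0,
        model="",
        system_fingerprint="",
        object="chat.completion",
        usage=None,
    )
```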