Preliminary argot support (#27)

Author: Akarin~
Date: 2025-06-10 13:30:54 +08:00
Committed by: GitHub
Parent: 6bfa2c39a1
Commit: dc6786deab
7 changed files with 96 additions and 18 deletions
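
Not part of the diff below: a minimal, self-contained sketch of the pattern this commit introduces, built only from calls that appear in the changed files (Argot, Text, UniMessage, send(reply_to=True)). The function name and the content/details parameters are illustrative, and the code is only expected to work inside a matcher handler, where UniMessage.send can resolve its reply target.

# Sketch only: attach a hidden "detail" Argot segment to an outgoing reply so that
# nonebot_plugin_argot can reveal it later through the "detail" command.
from datetime import timedelta

from nonebot_plugin_alconna.uniseg import Text, UniMessage
from nonebot_plugin_argot import Argot  # type: ignore


async def reply_with_details(content: str, details: str) -> None:  # illustrative name
    message = UniMessage(content)
    message.append(
        Argot(
            "detail",
            Text(details),
            command="detail",
            expired_at=timedelta(minutes=5),
        )
    )
    # Must run inside a matcher handler so the reply target can be resolved.
    await message.send(reply_to=True)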

View File

@@ -26,17 +26,19 @@ from nonebot.plugin import require
require("nonebot_plugin_alconna")
require("nonebot_plugin_localstore")
require("nonebot_plugin_argot")
import nonebot_plugin_localstore as store # type: ignore
from nonebot import get_driver, logger # type: ignore
from .config import config
# from .hunyuan import *
from .dev import *
from .marsho import *
from .metadata import metadata
# from .hunyuan import *
__author__ = "Asankilp"
__plugin_meta__ = metadata

View File

@@ -1,15 +1,16 @@
import os
from pathlib import Path
from nonebot import get_driver, logger, require
from nonebot import get_driver, logger, on_command, require
from nonebot.adapters import Bot, Event
from nonebot.matcher import Matcher
from nonebot.typing import T_State
from nonebot_plugin_argot import add_argot, get_message_id
from nonebot_plugin_marshoai.plugin.load import reload_plugin
from .config import config
from .marsho import context
from .instances import context
from .plugin.func_call.models import SessionContext
require("nonebot_plugin_alconna")
@@ -48,6 +49,21 @@ function_call = on_alconna(
    permission=SUPERUSER,
)

argot_test = on_command("argot", permission=SUPERUSER)


@argot_test.handle()
async def _():
    await argot_test.send(
        "aa",
        argot={
            "name": "test",
            "command": "test",
            "segment": f"{os.getcwd()}",
            "expired_at": 1000,
        },
    )

@function_call.assign("list")
async def list_functions():

View File

@@ -1,4 +1,5 @@
import json
from datetime import timedelta
from typing import Optional, Tuple, Union
from azure.ai.inference.models import (
@@ -17,7 +18,13 @@ from nonebot.matcher import (
    current_event,
    current_matcher,
)
from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg, get_message_id, get_target
from nonebot_plugin_alconna.uniseg import (
    Text,
    UniMessage,
    UniMsg,
    get_target,
)
from nonebot_plugin_argot import Argot # type: ignore
from openai import AsyncOpenAI, AsyncStream
from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
@@ -35,7 +42,7 @@ from .util import (
    make_chat_openai,
    parse_richtext,
)
from .utils.processor import process_chat_stream
from .utils.processor import process_chat_stream, process_completion_to_details
class MarshoHandler:
@@ -230,12 +237,28 @@
                target_list.append([self.target.id, self.target.private])

            # 对话成功发送消息
            send_message = UniMessage()
            if config.marshoai_enable_richtext_parse:
                await (await parse_richtext(str(choice_msg_content))).send(
                    reply_to=True
                )
                send_message = await parse_richtext(str(choice_msg_content))
            else:
                await UniMessage(str(choice_msg_content)).send(reply_to=True)
                send_message = UniMessage(str(choice_msg_content))
            send_message.append(
                Argot(
                    "detail",
                    Text(await process_completion_to_details(response)),
                    command="detail",
                    expired_at=timedelta(minutes=5),
                )
            )
            # send_message.append(
            #     Argot(
            #         "debug",
            #         Text(str(response)),
            #         command=f"debug",
            #         expired_at=timedelta(minutes=5),
            #     )
            # )
            await send_message.send(reply_to=True)
            return UserMessage(content=user_message), choice_msg_after
        elif choice.finish_reason == CompletionsFinishReason.CONTENT_FILTERED:

View File

@@ -23,6 +23,7 @@ from nonebot_plugin_alconna import (
    message_reaction,
    on_alconna,
)
from nonebot_plugin_argot.extension import ArgotExtension # type: ignore
from .config import config
from .constants import INTRODUCTION, SUPPORT_IMAGE_MODELS
@@ -63,6 +64,7 @@ marsho_cmd = on_alconna(
    aliases=tuple(config.marshoai_aliases),
    priority=96,
    block=True,
    extensions=[ArgotExtension()],
)
resetmem_cmd = on_alconna(
    Alconna(

View File

@@ -16,7 +16,7 @@ async def process_chat_stream(
        last_chunk = chunk
        # print(chunk)
        if not is_first_token_appeared:
            logger.debug(f"{chunk.id}: 第一个 token 已出现")
            logger.info(f"{chunk.id}: 第一个 token 已出现")
            is_first_token_appeared = True
        if not chunk.choices:
            logger.info("Usage:", chunk.usage)
@@ -29,7 +29,7 @@ async def process_chat_stream(
                reasoning_contents += delta.reasoning_content
            else:
                if not is_answering:
                    logger.debug(
                    logger.info(
                        f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
                    )
                    is_answering = True
@@ -69,3 +69,19 @@ async def process_chat_stream(
        object="chat.completion",
        usage=None,
    )


async def process_completion_to_details(completion: ChatCompletion) -> str:
    usage_text = ""
    usage = completion.usage
    if usage is None:
        usage_text = ""
    else:
        usage_text = str(usage)
    details_text = f"""=========消息详情=========
模型: {completion.model}
消息 ID: {completion.id}
用量信息: {usage_text}"""
    # print(details_text)
    return details_text
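
For reference, a hedged usage sketch of the new helper (not part of the diff). It assumes the function is importable as nonebot_plugin_marshoai.utils.processor.process_completion_to_details, as the relative import in the handler suggests; the completion field values are illustrative.

# Illustrative only: build a minimal ChatCompletion and render its details text.
import asyncio

from openai.types.chat import ChatCompletion

from nonebot_plugin_marshoai.utils.processor import process_completion_to_details

completion = ChatCompletion(
    id="chatcmpl-123",  # illustrative values
    choices=[],
    created=0,
    model="some-model",
    object="chat.completion",
    usage=None,
)
print(asyncio.run(process_completion_to_details(completion)))
# Expected output (usage line empty because usage is None):
# =========消息详情=========
# 模型: some-model
# 消息 ID: chatcmpl-123
# 用量信息: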