mirror of
https://github.com/LiteyukiStudio/nonebot-plugin-marshoai.git
synced 2026-01-10 22:21:47 +00:00
Compare commits
9 Commits
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 872be20950 |  |
|  | f5ea844156 |  |
|  | 4fbe6c6366 |  |
|  | 5b315c46b1 |  |
|  | d9f22fa0f7 |  |
|  | 091e88fe81 |  |
|  | 5efd753557 |  |
|  | 17551885f5 |  |
|  | aaa4056482 |  |
.github/workflows/docs-build.yml (vendored, 40 lines changed)
@@ -1,18 +1,26 @@
-name: Deploy VitePress site to Liteyuki PaaS
+# 构建 VitePress 站点并将其部署到 GitHub Pages 的示例工作流程
+#
+name: Deploy VitePress site to Pages
 
-on: ["push", "pull_request_target"]
+on:
+  # 在针对 `main` 分支的推送上运行。如果你
+  # 使用 `master` 分支作为默认分支,请将其更改为 `master`
+  push:
+    branches: [main]
+
+  # 允许你从 Actions 选项卡手动运行此工作流程
+  workflow_dispatch:
 
+# 设置 GITHUB_TOKEN 的权限,以允许部署到 GitHub Pages
 permissions:
   contents: write
-  statuses: write
 
+# 只允许同时进行一次部署,跳过正在运行和最新队列之间的运行队列
+# 但是,不要取消正在进行的运行,因为我们希望允许这些生产部署完成
 concurrency:
   group: pages
   cancel-in-progress: false
 
-env:
-  MELI_SITE: f31e3b17-c4ea-4d9d-bdce-9417d67fd30e
-
 jobs:
   # 构建工作
   build:
@@ -22,10 +30,12 @@ jobs:
         uses: actions/checkout@v4
         with:
           fetch-depth: 0 # 如果未启用 lastUpdated,则不需要
+      # - uses: pnpm/action-setup@v3 # 如果使用 pnpm,请取消注释
+      # - uses: oven-sh/setup-bun@v1 # 如果使用 Bun,请取消注释
       - name: Setup Python
         uses: actions/setup-python@v2
         with:
-          python-version: "3.11"
+          python-version: '3.11'
 
       - name: Setup API markdown
         run: |-
@@ -49,13 +59,9 @@
         run: |-
           pnpm run docs:build
 
-      - name: "发布"
-        run: |
-          npx -p "@getmeli/cli" meli upload docs/.vitepress/dist \
-            --url "https://pages.liteyuki.icu" \
-            --site "$MELI_SITE" \
-            --token "$MELI_TOKEN" \
-            --release "$GITHUB_SHA"
-        env:
-          MELI_TOKEN: ${{ secrets.MELI_TOKEN }}
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: 部署文档
+        uses: JamesIves/github-pages-deploy-action@v4
+        with:
+          # 这是文档部署到的分支名称
+          branch: docs
+          folder: docs/.vitepress/dist
@@ -15,7 +15,7 @@ repos:
         args: [--config=./pyproject.toml]
 
   - repo: https://github.com/PyCQA/isort
-    rev: 6.0.1
+    rev: 6.0.0
    hooks:
      - id: isort
        args: ["--profile", "black"]
@@ -1,6 +1,6 @@
 <!--suppress LongLine -->
 <div align="center">
-  <a href="https://marshoai-docs.pages.liteyuki.icu"><img src="https://marshoai-docs.pages.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
+  <a href="https://marsho.liteyuki.icu"><img src="https://marsho.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
   <br>
 </div>
 
@@ -48,7 +48,7 @@ _谁不喜欢回复消息快又可爱的猫娘呢?_
 
 ## 😼 使用
 
-请查看[使用文档](https://marshoai-docs.pages.liteyuki.icu/start/use.html)
+请查看[使用文档](https://marsho.liteyuki.icu/start/use)
 
 ## ❤ 鸣谢&版权说明
 
@@ -1,6 +1,6 @@
 <!--suppress LongLine -->
 <div align="center">
-  <a href="https://marshoai-docs.pages.liteyuki.icu"><img src="https://marshoai-docs.pages.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
+  <a href="https://marsho.liteyuki.icu"><img src="https://marsho.liteyuki.icu/marsho-full.svg" width="800" height="430" alt="MarshoLogo"></a>
   <br>
 </div>
 
@@ -48,7 +48,7 @@ Plugin internally installed the catgirl character of Marsho, is able to have a c
 - 🐾 Play! I like play with friends!
 
 ## 😼 Usage
-Please read [Documentation](https://marshoai-docs.pages.liteyuki.icu/start/use.html)
+Please read [Documentation](https://marsho.liteyuki.icu/start/install)
 
 ## ❤ Thanks&Copyright
 This project uses the following code from other projects:
@@ -1,87 +1,81 @@
-import { VitePressSidebarOptions } from "vitepress-sidebar/types";
+import { VitePressSidebarOptions } from "vitepress-sidebar/types"
 
 export const gitea = {
-  svg: '<svg t="1725391346807" class="icon" viewBox="0 0 1025 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="5067" width="256" height="256"><path d="M1004.692673 466.396616l-447.094409-447.073929c-25.743103-25.763582-67.501405-25.763582-93.264987 0l-103.873521 103.873521 78.171378 78.171378c12.533635-6.00058 26.562294-9.359266 41.389666-9.359266 53.02219 0 96.00928 42.98709 96.00928 96.00928 0 14.827372-3.358686 28.856031-9.359266 41.389666l127.97824 127.97824c12.533635-6.00058 26.562294-9.359266 41.389666-9.359266 53.02219 0 96.00928 42.98709 96.00928 96.00928s-42.98709 96.00928-96.00928 96.00928-96.00928-42.98709-96.00928-96.00928c0-14.827372 3.358686-28.856031 9.359266-41.389666l-127.97824-127.97824c-3.051489 1.454065-6.184898 2.744293-9.379746 3.870681l0 266.97461c37.273227 13.188988 63.99936 48.721433 63.99936 90.520695 0 53.02219-42.98709 96.00928-96.00928 96.00928s-96.00928-42.98709-96.00928-96.00928c0-41.799262 26.726133-77.331707 63.99936-90.520695l0-266.97461c-37.273227-13.188988-63.99936-48.721433-63.99936-90.520695 0-14.827372 3.358686-28.856031 9.359266-41.389666l-78.171378-78.171378-295.892081 295.871601c-25.743103 25.784062-25.743103 67.542365 0 93.285467l447.114889 447.073929c25.743103 25.743103 67.480925 25.743103 93.264987 0l445.00547-445.00547c25.763582-25.763582 25.763582-67.542365 0-93.285467z" fill="#a2d8f4" p-id="5068"></path></svg>',
-};
+  svg: '<svg t="1725391346807" class="icon" viewBox="0 0 1025 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="5067" width="256" height="256"><path d="M1004.692673 466.396616l-447.094409-447.073929c-25.743103-25.763582-67.501405-25.763582-93.264987 0l-103.873521 103.873521 78.171378 78.171378c12.533635-6.00058 26.562294-9.359266 41.389666-9.359266 53.02219 0 96.00928 42.98709 96.00928 96.00928 0 14.827372-3.358686 28.856031-9.359266 41.389666l127.97824 127.97824c12.533635-6.00058 26.562294-9.359266 41.389666-9.359266 53.02219 0 96.00928 42.98709 96.00928 96.00928s-42.98709 96.00928-96.00928 96.00928-96.00928-42.98709-96.00928-96.00928c0-14.827372 3.358686-28.856031 9.359266-41.389666l-127.97824-127.97824c-3.051489 1.454065-6.184898 2.744293-9.379746 3.870681l0 266.97461c37.273227 13.188988 63.99936 48.721433 63.99936 90.520695 0 53.02219-42.98709 96.00928-96.00928 96.00928s-96.00928-42.98709-96.00928-96.00928c0-41.799262 26.726133-77.331707 63.99936-90.520695l0-266.97461c-37.273227-13.188988-63.99936-48.721433-63.99936-90.520695 0-14.827372 3.358686-28.856031 9.359266-41.389666l-78.171378-78.171378-295.892081 295.871601c-25.743103 25.784062-25.743103 67.542365 0 93.285467l447.114889 447.073929c25.743103 25.743103 67.480925 25.743103 93.264987 0l445.00547-445.00547c25.763582-25.763582 25.763582-67.542365 0-93.285467z" fill="#a2d8f4" p-id="5068"></path></svg>'
+}
 
-export const defaultLang = "zh";
+export const defaultLang = 'zh'
 
 const commonSidebarOptions: VitePressSidebarOptions = {
   collapsed: true,
   convertSameNameSubFileToGroupIndexPage: true,
   useTitleFromFrontmatter: true,
   useFolderTitleFromIndexFile: false,
   useFolderLinkFromIndexFile: true,
   useTitleFromFileHeading: true,
-  rootGroupText: "MARSHOAI",
+  rootGroupText: 'MARSHOAI',
   includeFolderIndexFile: true,
   sortMenusByFrontmatterOrder: true,
-};
+}
 
 export function generateSidebarConfig(): VitePressSidebarOptions[] {
-  let sections = ["dev", "start"];
-  let languages = ["zh", "en"];
-  let ret: VitePressSidebarOptions[] = [];
+  let sections = ["dev", "start"]
+  let languages = ['zh', 'en']
+  let ret: VitePressSidebarOptions[] = []
   for (let language of languages) {
     for (let section of sections) {
       if (language === defaultLang) {
         ret.push({
           basePath: `/${section}/`,
           scanStartPath: `docs/${language}/${section}`,
           resolvePath: `/${section}/`,
-          ...commonSidebarOptions,
-        });
+          ...commonSidebarOptions
+        })
       } else {
         ret.push({
           basePath: `/${language}/${section}/`,
           scanStartPath: `docs/${language}/${section}`,
           resolvePath: `/${language}/${section}/`,
-          ...commonSidebarOptions,
-        });
+          ...commonSidebarOptions
+        })
       }
     }
   }
-  return ret;
+  return ret
 }
 
 export const ThemeConfig = {
-  getEditLink: (
-    editPageText: string
-  ): { pattern: (params: { filePath: string }) => string; text: string } => {
-    return {
-      pattern: ({ filePath }: { filePath: string }): string => {
-        if (!filePath) {
-          throw new Error("filePath is undefined");
-        }
-        const regex = /^(dev\/api|[^\/]+\/dev\/api)/;
-        if (regex.test(filePath)) {
-          filePath = filePath
-            .replace(regex, "")
-            .replace("index.md", "__init__.py")
-            .replace(".md", ".py");
-          const fileName = filePath.split("/").pop();
-          const parentFolder = filePath.split("/").slice(-2, -1)[0];
-          if (
-            fileName &&
-            parentFolder &&
-            fileName.split(".")[0] === parentFolder
-          ) {
-            filePath =
-              filePath.split("/").slice(0, -1).join("/") + "/__init__.py";
-          }
-          return `https://github.com/LiteyukiStudio/nonebot-plugin-marshoai/tree/main/nonebot_plugin_marshoai/${filePath}`;
-        } else {
-          return `https://github.com/LiteyukiStudio/nonebot-plugin-marshoai/tree/main/docs/${filePath}`;
-        }
-      },
-      text: editPageText,
-    };
-  },
+  getEditLink: (editPageText: string): { pattern: (params: { filePath: string; }) => string; text: string; } => {
+    return {
+      pattern: ({filePath}: { filePath: string; }): string => {
+        if (!filePath) {
+          throw new Error("filePath is undefined");
+        }
+        const regex = /^(dev\/api|[^\/]+\/dev\/api)/;
+        if (regex.test(filePath)) {
+          filePath = filePath.replace(regex, '')
+            .replace('index.md', '__init__.py')
+            .replace('.md', '.py');
+          const fileName = filePath.split('/').pop();
+          const parentFolder = filePath.split('/').slice(-2, -1)[0];
+          if (fileName && parentFolder && fileName.split('.')[0] === parentFolder) {
+            filePath = filePath.split('/').slice(0, -1).join('/') + '/__init__.py';
+          }
+          return `https://github.com/LiteyukiStudio/nonebot-plugin-marshoai/tree/main/nonebot_plugin_marshoai/${filePath}`;
+        } else {
+          return `https://github.com/LiteyukiStudio/nonebot-plugin-marshoai/tree/main/docs/${filePath}`;
+        }
+      },
+      text: editPageText
+    };
+  },
 
-  getOutLine: (label: string): { label: string; level: [number, number] } => {
+  getOutLine: (label: string): { label: string; level: [number, number]; } => {
     return {
       label: label,
-      level: [2, 6],
+      level: [2, 6]
     };
   },
-};
+  copyright: 'Copyright (C) 2020-2024 LiteyukiStudio. All Rights Reserved'
+}
@@ -23,7 +23,7 @@ export const en = defineConfig({
     lightModeSwitchTitle: 'Light',
     darkModeSwitchTitle: 'Dark',
     footer: {
-      message: "The document is being improved. Suggestions are welcome.<br>Webpage is deployed at <a href='https://meli.liteyuki.icu' target='_blank'>Liteyuki Meli</a> and accelerated by <a href='https://cdn.liteyuki.icu' target='_blank'>Liteyukiflare</a>.",
+      message: "The document is being improved. Suggestions are welcome.",
       copyright: '© 2024 <a href="https://liteyuki.icu" target="_blank">Liteyuki Studio</a>',
     }
   },
@@ -8,13 +8,12 @@ import { generateSidebar } from 'vitepress-sidebar'
 // https://vitepress.dev/reference/site-config
 export default defineConfig({
   head: [
-    ["script", { src: "https://cdn.liteyuki.icu/js/liteyuki_footer.js" }],
     ['link', { rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' }],
   ],
   rewrites: {
     [`${defaultLang}/:rest*`]: ":rest*",
   },
-  cleanUrls: false,
+  cleanUrls: true,
   themeConfig: {
     // https://vitepress.dev/reference/default-theme-config
     logo: {
@@ -23,7 +23,7 @@ export const ja = defineConfig({
     lightModeSwitchTitle: 'ライト',
     darkModeSwitchTitle: 'ダーク',
     footer: {
-      message: "ドキュメントは改善中です。ご意見をお待ちしております。<br>ウェブサイトは <a href='https://meli.liteyuki.icu' target='_blank'>Liteyuki Meli</a> によってデプロイされ、<a href='https://cdn.liteyuki.icu' target='_blank'>Liteyukiflare</a> によって加速されています。",
+      message: "ドキュメントは改善中です。ご意見をお待ちしております。",
       copyright: '© 2024 <a href="https://liteyuki.icu" target="_blank">Liteyuki Studio</a>',
     }
   },
@@ -23,7 +23,7 @@ export const zh = defineConfig({
     lightModeSwitchTitle: '轻色模式',
     darkModeSwitchTitle: '深色模式',
     footer: {
-      message: "文档完善中,欢迎提出建议或帮助我们完善。<br>网站部署在 <a href='https://meli.liteyuki.icu' target='_blank'>Liteyuki Meli</a> 由 <a href='https://cdn.liteyuki.icu' target='_blank'>Liteyukiflare</a> 提供加速服务。",
+      message: "文档完善中,欢迎提出建议或帮助我们完善。",
       copyright: '© 2024 <a href="https://liteyuki.icu" target="_blank">Liteyuki Studio</a>',
     }
   },
@@ -65,7 +65,7 @@ When nonebot linked to OneBot v11 adapter, can recieve double click and response
 MarshoTools is a feature added in `v0.5.0`, support loading external function library to provide Function Call for Marsho.
 
 ## 🧩 Marsho Plugin
-Marsho Plugin is a feature added in `v1.0.0`, replacing the old MarshoTools feature. [Documentation](https://marshoai-docs.pages.liteyuki.icu/dev/extension)
+Marsho Plugin is a feature added in `v1.0.0`, replacing the old MarshoTools feature. [Documentation](https://marsho.liteyuki.icu/dev/extension)
 
 ## 👍 Praise list
 
@@ -148,4 +148,3 @@ Add options in the `.env` file from the diagram below in nonebot2 project.
 | MARSHOAI_SINGLE_LATEX_PARSE | `bool` | `false`| Render single-line equation or not |
 | MARSHOAI_FIX_TOOLCALLS | `bool` | `true` | Fix tool calls or not |
 | MARSHOAI_SEND_THINKING | `bool` | `true` | Send thinking chain or not |
-| MARSHOAI_STREAM | `bool` | `false`| 是否通过流式方式请求 API **开启此项后暂无法使用函数调用,无法在 Bot 用户侧聊天界面呈现出流式效果** |
@@ -68,7 +68,7 @@ GitHub Models API 的限制较多,不建议使用,建议通过修改`MARSHOA
 
 ## 🧩 小棉插件
 
-小棉插件是`v1.0.0`的新增功能,替代旧的小棉工具功能。[使用文档](https://marshoai-docs.pages.liteyuki.icu/dev/extension)
+小棉插件是`v1.0.0`的新增功能,替代旧的小棉工具功能。[使用文档](https://marsho.liteyuki.icu/dev/extension)
 
 ## 👍 夸赞名单
 
@@ -149,8 +149,6 @@ GitHub Models API 的限制较多,不建议使用,建议通过修改`MARSHOA
 | MARSHOAI_SINGLE_LATEX_PARSE | `bool` | `false` | 单行公式是否渲染(当消息富文本解析启用时可用)(如果单行也渲……只能说不好看) |
 | MARSHOAI_FIX_TOOLCALLS | `bool` | `true` | 是否修复工具调用(部分模型须关闭,使用 vLLM 部署的模型时须关闭) |
 | MARSHOAI_SEND_THINKING | `bool` | `true` | 是否发送思维链(部分模型不支持) |
-| MARSHOAI_STREAM | `bool` | `false`| 是否通过流式方式请求 API **开启此项后暂无法使用函数调用,无法在 Bot 用户侧聊天界面呈现出流式效果** |
-
 
 #### 开发及调试选项
 
|
|||||||
marshoai_enable_sysasuser_prompt: bool = False
|
marshoai_enable_sysasuser_prompt: bool = False
|
||||||
marshoai_additional_prompt: str = ""
|
marshoai_additional_prompt: str = ""
|
||||||
marshoai_poke_suffix: str = "揉了揉你的猫耳"
|
marshoai_poke_suffix: str = "揉了揉你的猫耳"
|
||||||
marshoai_stream: bool = False
|
|
||||||
marshoai_enable_richtext_parse: bool = True
|
marshoai_enable_richtext_parse: bool = True
|
||||||
"""
|
"""
|
||||||
是否启用自动消息富文本解析 即若包含图片链接则发送图片、若包含LaTeX公式则发送公式图。
|
是否启用自动消息富文本解析 即若包含图片链接则发送图片、若包含LaTeX公式则发送公式图。
|
||||||
|
|||||||
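The MARSHOAI_* rows in the usage tables above and the ConfigModel fields in this hunk describe the same settings from two sides. A minimal sketch of how that mapping typically works, assuming the usual pydantic/NoneBot convention of lower-casing the `.env` option name (illustrative only, not part of this diff):

```python
# Illustrative only: a .env entry such as MARSHOAI_FIX_TOOLCALLS=false ends up
# as config.marshoai_fix_toolcalls == False on the model below.
from pydantic import BaseModel


class ConfigModel(BaseModel):
    marshoai_poke_suffix: str = "揉了揉你的猫耳"
    marshoai_fix_toolcalls: bool = True
    marshoai_enable_richtext_parse: bool = True


# NoneBot performs roughly this step with the values it reads from the bot's .env:
config = ConfigModel(marshoai_fix_toolcalls=False)
print(config.marshoai_fix_toolcalls)  # False
```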
@@ -37,7 +37,7 @@ OPENAI_NEW_MODELS: list = [
 INTRODUCTION: str = f"""MarshoAI-NoneBot by LiteyukiStudio
 你好喵~我是一只可爱的猫娘AI,名叫小棉~🐾!
 我的主页在这里哦~↓↓↓
-https://marshoai-docs.pages.liteyuki.icu
+https://marsho.liteyuki.icu
 
 ※ 使用 「{config.marshoai_default_name}.status」命令获取状态信息。
 ※ 使用「{config.marshoai_default_name}.help」命令获取使用说明。"""
@@ -17,9 +17,9 @@ from nonebot.matcher (
     current_event,
     current_matcher,
 )
-from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg, get_message_id, get_target
-from openai import AsyncOpenAI, AsyncStream
-from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
+from nonebot_plugin_alconna.uniseg import UniMessage, UniMsg
+from openai import AsyncOpenAI
+from openai.types.chat import ChatCompletion, ChatCompletionMessage
 
 from .config import config
 from .constants import SUPPORT_IMAGE_MODELS
@@ -35,7 +35,6 @@ from .util import (
     make_chat_openai,
     parse_richtext,
 )
-from .utils.request import process_chat_stream
 
 
 class MarshoHandler:
@@ -50,8 +49,8 @@ class MarshoHandler:
         self.event: Event = current_event.get()
         # self.state: T_State = current_handler.get().state
         self.matcher: Matcher = current_matcher.get()
-        self.message_id: str = get_message_id(self.event)
-        self.target = get_target(self.event)
+        self.message_id: str = UniMessage.get_message_id(self.event)
+        self.target = UniMessage.get_target(self.event)
 
     async def process_user_input(
         self, user_input: UniMsg, model_name: str
@@ -95,15 +94,14 @@ class MarshoHandler:
         self,
         user_message: Union[str, list],
         model_name: str,
-        tools_list: list | None,
+        tools_list: list,
         tool_message: Optional[list] = None,
-        stream: bool = False,
-    ) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
+    ) -> ChatCompletion:
         """
         处理单条聊天
         """
 
-        context_msg = await get_prompt(model_name) + (
+        context_msg = get_prompt(model_name) + (
             self.context.build(self.target.id, self.target.private)
         )
         response = await make_chat_openai(
@@ -111,24 +109,20 @@ class MarshoHandler:
             msg=context_msg + [UserMessage(content=user_message).as_dict()] + (tool_message if tool_message else []),  # type: ignore
             model_name=model_name,
             tools=tools_list if tools_list else None,
-            stream=stream,
         )
         return response
 
     async def handle_function_call(
         self,
-        completion: Union[ChatCompletion],
+        completion: ChatCompletion,
         user_message: Union[str, list],
         model_name: str,
-        tools_list: list | None = None,
+        tools_list: list,
     ):
         # function call
         # 需要获取额外信息,调用函数工具
         tool_msg = []
-        if isinstance(completion, ChatCompletion):
-            choice = completion.choices[0]
-        else:
-            raise ValueError("Unexpected completion type")
+        choice = completion.choices[0]
         # await UniMessage(str(response)).send()
         tool_calls = choice.message.tool_calls
         # try:
@@ -188,7 +182,7 @@ class MarshoHandler:
         self,
         user_message: Union[str, list],
         model_name: str,
-        tools_list: list | None = None,
+        tools_list: list,
         stream: bool = False,
         tool_message: Optional[list] = None,
     ) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]:
@@ -197,20 +191,14 @@ class MarshoHandler:
         """
         global target_list
         if stream:
-            response = await self.handle_stream_request(
-                user_message=user_message,
-                model_name=model_name,
-                tools_list=tools_list,
-                tools_message=tool_message,
-            )
-        else:
-            response = await self.handle_single_chat(  # type: ignore
-                user_message=user_message,
-                model_name=model_name,
-                tools_list=tools_list,
-                tool_message=tool_message,
-            )
-        choice = response.choices[0]  # type: ignore
+            raise NotImplementedError
+        response = await self.handle_single_chat(
+            user_message=user_message,
+            model_name=model_name,
+            tools_list=tools_list,
+            tool_message=tool_message,
+        )
+        choice = response.choices[0]
         # Sprint(choice)
         # 当tool_calls非空时,将finish_reason设置为TOOL_CALLS
         if choice.message.tool_calls is not None and config.marshoai_fix_toolcalls:
@@ -252,26 +240,3 @@ class MarshoHandler:
         else:
             await UniMessage(f"意外的完成原因:{choice.finish_reason}").send()
             return None
-
-    async def handle_stream_request(
-        self,
-        user_message: Union[str, list],
-        model_name: str,
-        tools_list: list | None = None,
-        tools_message: Optional[list] = None,
-    ) -> ChatCompletion:
-        """
-        处理流式请求
-        """
-        response = await self.handle_single_chat(
-            user_message=user_message,
-            model_name=model_name,
-            tools_list=None,  # TODO:让流式调用支持工具调用
-            tool_message=tools_message,
-            stream=True,
-        )
-
-        if isinstance(response, AsyncStream):
-            return await process_chat_stream(response)
-        else:
-            raise TypeError("Unexpected response type for stream request")
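handle_function_call above exists to feed tool results back into a follow-up model call. For orientation, a generic sketch of that round trip with the OpenAI SDK; `run_tool`, the model name, and the message handling are illustrative stand-ins rather than this plugin's actual wiring:

```python
import json

from openai import AsyncOpenAI


async def run_tool(name: str, arguments: str) -> str:
    """Hypothetical tool dispatcher; a real one would call the registered function."""
    return json.dumps({"tool": name, "echo": json.loads(arguments or "{}")})


async def chat_with_tools(client: AsyncOpenAI, messages: list, tools: list) -> str:
    completion = await client.chat.completions.create(
        model="gpt-4o-mini",  # illustrative model name
        messages=messages,
        tools=tools,
    )
    choice = completion.choices[0]
    if choice.message.tool_calls:  # the model asked for extra information
        messages.append(choice.message)
        for call in choice.message.tool_calls:
            messages.append(
                {
                    "role": "tool",
                    "tool_call_id": call.id,
                    "content": await run_tool(call.function.name, call.function.arguments),
                }
            )
        # Ask again with the tool output appended, mirroring the second chat
        # request made after handle_function_call collects its tool messages.
        completion = await client.chat.completions.create(
            model="gpt-4o-mini", messages=messages, tools=tools
        )
    return completion.choices[0].message.content or ""
```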
@@ -15,14 +15,7 @@ from nonebot.params import CommandArg
 from nonebot.permission import SUPERUSER
 from nonebot.rule import to_me
 from nonebot.typing import T_State
-from nonebot_plugin_alconna import (
-    Emoji,
-    MsgTarget,
-    UniMessage,
-    UniMsg,
-    message_reaction,
-    on_alconna,
-)
+from nonebot_plugin_alconna import MsgTarget, UniMessage, UniMsg, on_alconna
 
 from .config import config
 from .constants import INTRODUCTION, SUPPORT_IMAGE_MODELS
@@ -32,7 +25,6 @@ from .instances import client, context, model_name, target_list, tools
 from .metadata import metadata
 from .plugin.func_call.caller import get_function_calls
 from .util import *
-from .utils.request import process_chat_stream
 
 
 async def at_enable():
@@ -40,20 +32,20 @@ async def at_enable():
 
 
 changemodel_cmd = on_command(
-    "changemodel", permission=SUPERUSER, priority=96, block=True
+    "changemodel", permission=SUPERUSER, priority=10, block=True
 )
 # setprompt_cmd = on_command("prompt",permission=SUPERUSER)
-praises_cmd = on_command("praises", permission=SUPERUSER, priority=96, block=True)
-add_usermsg_cmd = on_command("usermsg", permission=SUPERUSER, priority=96, block=True)
+praises_cmd = on_command("praises", permission=SUPERUSER, priority=10, block=True)
+add_usermsg_cmd = on_command("usermsg", permission=SUPERUSER, priority=10, block=True)
 add_assistantmsg_cmd = on_command(
-    "assistantmsg", permission=SUPERUSER, priority=96, block=True
+    "assistantmsg", permission=SUPERUSER, priority=10, block=True
 )
-contexts_cmd = on_command("contexts", permission=SUPERUSER, priority=96, block=True)
+contexts_cmd = on_command("contexts", permission=SUPERUSER, priority=10, block=True)
 save_context_cmd = on_command(
-    "savecontext", permission=SUPERUSER, priority=96, block=True
+    "savecontext", permission=SUPERUSER, priority=10, block=True
 )
 load_context_cmd = on_command(
-    "loadcontext", permission=SUPERUSER, priority=96, block=True
+    "loadcontext", permission=SUPERUSER, priority=10, block=True
 )
 marsho_cmd = on_alconna(
     Alconna(
@@ -61,42 +53,42 @@ marsho_cmd = on_alconna(
         Args["text?", AllParam],
     ),
     aliases=tuple(config.marshoai_aliases),
-    priority=96,
+    priority=10,
     block=True,
 )
 resetmem_cmd = on_alconna(
     Alconna(
         config.marshoai_default_name + ".reset",
     ),
-    priority=96,
+    priority=10,
     block=True,
 )
 marsho_help_cmd = on_alconna(
     Alconna(
         config.marshoai_default_name + ".help",
     ),
-    priority=96,
+    priority=10,
     block=True,
 )
 marsho_status_cmd = on_alconna(
     Alconna(
         config.marshoai_default_name + ".status",
     ),
-    priority=96,
+    priority=10,
     block=True,
 )
 
-marsho_at = on_message(rule=to_me() & at_enable, priority=97)
+marsho_at = on_message(rule=to_me() & at_enable, priority=11)
 nickname_cmd = on_alconna(
     Alconna(
         "nickname",
         Args["name?", str],
     ),
-    priority=96,
+    priority=10,
     block=True,
 )
 refresh_data_cmd = on_command(
-    "refresh_data", permission=SUPERUSER, priority=96, block=True
+    "refresh_data", permission=SUPERUSER, priority=10, block=True
 )
 
 
@@ -234,7 +226,6 @@ async def marsho(
     if not text:
         # 发送说明
        # await UniMessage(metadata.usage + "\n当前使用的模型:" + model_name).send()
-        await message_reaction(Emoji("38"))
         await marsho_cmd.finish(INTRODUCTION)
     backup_context = await get_backup_context(target.id, target.private)
     if backup_context:
@@ -265,11 +256,8 @@ async def marsho(
             map(lambda v: v.data(), get_function_calls().values())
         )
         logger.info(f"正在获取回答,模型:{model_name}")
-        await message_reaction(Emoji("66"))
         # logger.info(f"上下文:{context_msg}")
-        response = await handler.handle_common_chat(
-            usermsg, model_name, tools_lists, config.marshoai_stream
-        )
+        response = await handler.handle_common_chat(usermsg, model_name, tools_lists)
         # await UniMessage(str(response)).send()
         if response is not None:
             context_user, context_assistant = response
@@ -292,24 +280,20 @@ with contextlib.suppress(ImportError):  # 优化先不做()
     async def poke(event: Event):
 
         user_nickname = await get_nickname_by_user_id(event.get_user_id())
-        usermsg = await get_prompt(model_name) + [
-            UserMessage(content=f"*{user_nickname}{config.marshoai_poke_suffix}"),
-        ]
         try:
             if config.marshoai_poke_suffix != "":
                 logger.info(f"收到戳一戳,用户昵称:{user_nickname}")
-                pre_response = await make_chat_openai(
+                response = await make_chat_openai(
                     client=client,
                     model_name=model_name,
-                    msg=usermsg,
-                    stream=config.marshoai_stream,
+                    msg=get_prompt(model_name)
+                    + [
+                        UserMessage(
+                            content=f"*{user_nickname}{config.marshoai_poke_suffix}"
+                        ),
+                    ],
                 )
-                if isinstance(pre_response, AsyncStream):
-                    response = await process_chat_stream(pre_response)
-                else:
-                    response = pre_response
-                choice = response.choices[0]  # type: ignore
+                choice = response.choices[0]
                 if choice.finish_reason == CompletionsFinishReason.STOPPED:
                     content = extract_content_and_think(choice.message)[0]
                     await UniMessage(" " + str(content)).send(at_sender=True)
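Every matcher in this file is registered with an explicit priority and block=True, which is what the 96/10 values above control. A minimal sketch of the NoneBot semantics this relies on (command names are illustrative): a smaller priority number is checked earlier, and block=True stops the event from propagating to later matchers.

```python
from nonebot import on_command

# Checked earlier (smaller number) and, because block=True, it stops propagation.
primary = on_command("demo", priority=10, block=True)
# Only reached when no earlier matcher blocked the event.
fallback = on_command("demo_fallback", priority=11)
```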
@@ -5,7 +5,7 @@ from .constants import USAGE
 
 metadata = PluginMetadata(
     name="Marsho AI 插件",
-    description="接入 Azure API 或其他 API 的 AI 聊天插件,支持图片处理,外部函数调用,兼容包括 DeepSeek-R1, QwQ-32B 在内的多个模型",
+    description="接入 Azure API 或其他 API 的 AI 聊天插件,支持图片处理,外部函数调用,兼容包括 DeepSeek-R1 在内的多个模型",
     usage=USAGE,
     type="application",
     config=ConfigModel,
@@ -16,7 +16,7 @@ marsho_memory_cmd = on_alconna(
         Subcommand("view", alias={"v"}),
         Subcommand("reset", alias={"r"}),
     ),
-    priority=96,
+    priority=10,
     block=True,
 )
 
@@ -2,9 +2,8 @@ import base64
 import json
 import mimetypes
 import re
-import ssl
 import uuid
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional
 
 import aiofiles  # type: ignore
 import httpx
@@ -16,9 +15,9 @@ from nonebot.log import logger
 from nonebot_plugin_alconna import Image as ImageMsg
 from nonebot_plugin_alconna import Text as TextMsg
 from nonebot_plugin_alconna import UniMessage
-from openai import AsyncOpenAI, AsyncStream, NotGiven
-from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
-from zhDateTime import DateTime  # type: ignore
+from openai import AsyncOpenAI, NotGiven
+from openai.types.chat import ChatCompletion, ChatCompletionMessage
+from zhDateTime import DateTime
 
 from ._types import DeveloperMessage
 from .cache.decos import *
@@ -59,8 +58,6 @@ _praises_init_data = {
 """
 初始夸赞名单之数据
 """
-_ssl_context = ssl.create_default_context()
-_ssl_context.set_ciphers("DEFAULT")
 
 
 async def get_image_raw_and_type(
@@ -77,7 +74,7 @@ async def get_image_raw_and_type(
         tuple[bytes, str]: 图片二进制数据, 图片MIME格式
     """
 
-    async with httpx.AsyncClient(verify=_ssl_context) as client:
+    async with httpx.AsyncClient() as client:
         response = await client.get(url, headers=_browser_headers, timeout=timeout)
         if response.status_code == 200:
             # 获取图片数据
@@ -101,7 +98,9 @@ async def get_image_b64(url: str, timeout: int = 10) -> Optional[str]:
         return: 图片base64编码
     """
 
-    if data_type := await get_image_raw_and_type(url, timeout):
+    if data_type := await get_image_raw_and_type(
+        url.replace("https://", "http://"), timeout
+    ):
         # image_format = content_type.split("/")[1] if content_type else "jpeg"
         base64_image = base64.b64encode(data_type[0]).decode("utf-8")
         data_url = "data:{};base64,{}".format(data_type[1], base64_image)
@@ -110,13 +109,35 @@ async def get_image_b64(url: str, timeout: int = 10) -> Optional[str]:
     return None
 
 
+async def make_chat(
+    client: ChatCompletionsClient,
+    msg: list,
+    model_name: str,
+    tools: Optional[list] = None,
+):
+    """
+    调用ai获取回复
+
+    参数:
+        client: 用于与AI模型进行通信
+        msg: 消息内容
+        model_name: 指定AI模型名
+        tools: 工具列表
+    """
+    return await client.complete(
+        messages=msg,
+        model=model_name,
+        tools=tools,
+        **config.marshoai_model_args,
+    )
+
+
 async def make_chat_openai(
     client: AsyncOpenAI,
     msg: list,
     model_name: str,
     tools: Optional[list] = None,
-    stream: bool = False,
-) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
+) -> ChatCompletion:
     """
     使用 Openai SDK 调用ai获取回复
 
@@ -131,21 +152,20 @@
         model=model_name,
         tools=tools or NOT_GIVEN,
         timeout=config.marshoai_timeout,
-        stream=stream,
         **config.marshoai_model_args,
     )
 
 
 @from_cache("praises")
-async def get_praises():
+def get_praises():
     praises_file = store.get_plugin_data_file(
         "praises.json"
     )  # 夸赞名单文件使用localstore存储
     if not praises_file.exists():
-        async with aiofiles.open(praises_file, "w", encoding="utf-8") as f:
-            await f.write(json.dumps(_praises_init_data, ensure_ascii=False, indent=4))
-    async with aiofiles.open(praises_file, "r", encoding="utf-8") as f:
-        data = json.loads(await f.read())
+        with open(praises_file, "w", encoding="utf-8") as f:
+            json.dump(_praises_init_data, f, ensure_ascii=False, indent=4)
+    with open(praises_file, "r", encoding="utf-8") as f:
+        data = json.load(f)
     praises_json = data
     return praises_json
 
@@ -161,8 +181,8 @@ async def refresh_praises_json():
     return data
 
 
-async def build_praises() -> str:
-    praises = await get_praises()
+def build_praises() -> str:
+    praises = get_praises()
     result = ["你喜欢以下几个人物,他们有各自的优点:"]
     for item in praises["like"]:
         result.append(f"名字:{item['name']},优点:{item['advantages']}")
@@ -214,8 +234,8 @@ async def set_nickname(user_id: str, name: str):
     data[user_id] = name
     if name == "" and user_id in data:
         del data[user_id]
-    async with aiofiles.open(filename, "w", encoding="utf-8") as f:
-        await f.write(json.dumps(data, ensure_ascii=False, indent=4))
+    with open(filename, "w", encoding="utf-8") as f:
+        json.dump(data, f, ensure_ascii=False, indent=4)
     return data
 
 
@@ -238,11 +258,11 @@ async def refresh_nickname_json():
         logger.error("刷新 nickname_json 表错误:无法载入 nickname.json 文件")
 
 
-async def get_prompt(model: str) -> List[Dict[str, Any]]:
+def get_prompt(model: str) -> List[Dict[str, Any]]:
     """获取系统提示词"""
     prompts = config.marshoai_additional_prompt
     if config.marshoai_enable_praises:
-        praises_prompt = await build_praises()
+        praises_prompt = build_praises()
         prompts += praises_prompt
 
     if config.marshoai_enable_time_prompt:
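A short usage sketch of the helpers above; the image URL is illustrative, and the content-parts layout follows the OpenAI-style convention used elsewhere in this plugin rather than anything defined in this file:

```python
import asyncio


async def demo() -> None:
    # get_image_b64 (defined above) returns a "data:<mime>;base64,..." URL or None.
    data_url = await get_image_b64("https://example.com/cat.png")
    if data_url is None:
        return
    user_message = [
        {"type": "text", "text": "这张图片里有什么?"},
        {"type": "image_url", "image_url": {"url": data_url}},
    ]
    # The list can then be wrapped as a user message and passed to make_chat_openai,
    # e.g. msg=[UserMessage(content=user_message).as_dict()], as handler.py does.


asyncio.run(demo())
```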
@@ -1,71 +0,0 @@
-from nonebot.log import logger
-from openai import AsyncStream
-from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
-from openai.types.chat.chat_completion import Choice
-
-
-async def process_chat_stream(
-    stream: AsyncStream[ChatCompletionChunk],
-) -> ChatCompletion:
-    reasoning_contents = ""
-    answer_contents = ""
-    last_chunk = None
-    is_first_token_appeared = False
-    is_answering = False
-    async for chunk in stream:
-        last_chunk = chunk
-        # print(chunk)
-        if not is_first_token_appeared:
-            logger.debug(f"{chunk.id}: 第一个 token 已出现")
-            is_first_token_appeared = True
-        if not chunk.choices:
-            logger.info("Usage:", chunk.usage)
-        else:
-            delta = chunk.choices[0].delta
-            if (
-                hasattr(delta, "reasoning_content")
-                and delta.reasoning_content is not None
-            ):
-                reasoning_contents += delta.reasoning_content
-            else:
-                if not is_answering:
-                    logger.debug(
-                        f"{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出"
-                    )
-                    is_answering = True
-                if delta.content is not None:
-                    answer_contents += delta.content
-    # print(last_chunk)
-    # 创建新的 ChatCompletion 对象
-    if last_chunk and last_chunk.choices:
-        message = ChatCompletionMessage(
-            content=answer_contents,
-            role="assistant",
-            tool_calls=last_chunk.choices[0].delta.tool_calls,  # type: ignore
-        )
-        if reasoning_contents != "":
-            setattr(message, "reasoning_content", reasoning_contents)
-        choice = Choice(
-            finish_reason=last_chunk.choices[0].finish_reason,  # type: ignore
-            index=last_chunk.choices[0].index,
-            message=message,
-        )
-        return ChatCompletion(
-            id=last_chunk.id,
-            choices=[choice],
-            created=last_chunk.created,
-            model=last_chunk.model,
-            system_fingerprint=last_chunk.system_fingerprint,
-            object="chat.completion",
-            usage=last_chunk.usage,
-        )
-    else:
-        return ChatCompletion(
-            id="",
-            choices=[],
-            created=0,
-            model="",
-            system_fingerprint="",
-            object="chat.completion",
-            usage=None,
-        )
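For reference, a minimal sketch of how a stream-folding helper like the removed process_chat_stream above would be driven with the OpenAI SDK; the model name is illustrative and the client assumes an API key in the environment:

```python
import asyncio

from openai import AsyncOpenAI


async def main() -> None:
    client = AsyncOpenAI()  # assumes OPENAI_API_KEY is set
    # stream=True yields an AsyncStream[ChatCompletionChunk], which the helper
    # above folded back into a single ChatCompletion object.
    stream = await client.chat.completions.create(
        model="gpt-4o-mini",  # illustrative model name
        messages=[{"role": "user", "content": "你好喵~"}],
        stream=True,
    )
    completion = await process_chat_stream(stream)
    print(completion.choices[0].message.content)


asyncio.run(main())
```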
pdm.lock (generated, 8 lines changed)
@@ -5,7 +5,7 @@
 groups = ["default", "dev", "test"]
 strategy = ["inherit_metadata"]
 lock_version = "4.5.0"
-content_hash = "sha256:9dd3edfe69c332deac360af2685358e82c5dac0870900668534fc6f1d34040f8"
+content_hash = "sha256:d7ab3d9ca825de512d4f87ec846f7fddcf3d5796a7c9562e60c8c7d39c058817"
 
 [[metadata.targets]]
 requires_python = "~=3.10"
@@ -1485,7 +1485,7 @@ files = [
 
 [[package]]
 name = "nonebot-plugin-alconna"
-version = "0.57.0"
+version = "0.54.1"
 requires_python = ">=3.9"
 summary = "Alconna Adapter for Nonebot"
 groups = ["default"]
@@ -1499,8 +1499,8 @@ dependencies = [
     "tarina<0.7,>=0.6.8",
 ]
 files = [
-    {file = "nonebot_plugin_alconna-0.57.0-py3-none-any.whl", hash = "sha256:6c4bcce1a9aa176244b4c011b19b1cea00269c4c6794cd4e90d8dd7990ec3ec9"},
-    {file = "nonebot_plugin_alconna-0.57.0.tar.gz", hash = "sha256:7a9a4bf373f3f6836611dbde1a0917b84441a534dd6f2b20dae3ba6fff142858"},
+    {file = "nonebot_plugin_alconna-0.54.1-py3-none-any.whl", hash = "sha256:4edb4b081cd64ce37717c7a92d31aadd2cf287a5a0adc2ac86ed82d9bcad5048"},
+    {file = "nonebot_plugin_alconna-0.54.1.tar.gz", hash = "sha256:66fae03120b8eff25bb0027d65f149e399aa6f73c7585ebdd388d1904cecdeee"},
 ]
 
 [[package]]
@@ -10,7 +10,7 @@ authors = [
 ]
 dependencies = [
     "nonebot2>=2.4.0",
-    "nonebot-plugin-alconna>=0.57.0",
+    "nonebot-plugin-alconna>=0.48.0",
     "nonebot-plugin-localstore>=0.7.1",
     "zhDatetime>=2.0.0",
     "aiohttp>=3.9",
@@ -34,7 +34,7 @@ dependencies = [
 license = { text = "MIT, Mulan PSL v2" }
 
 [project.urls]
-Homepage = "https://marshoai-docs.pages.liteyuki.icu/"
+Homepage = "https://marsho.liteyuki.icu/"
 
 
 [tool.nonebot]