mirror of
https://github.com/nonebot/nonebot2.git
synced 2025-10-07 11:16:43 +00:00
Compare commits
414 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
d1601bf2fe | ||
|
2994945c64 | ||
|
c9e3cad738 | ||
|
7c36964812 | ||
|
0e02d13c67 | ||
|
f7aeea2f3d | ||
|
b2da7d4cae | ||
|
239f9769c2 | ||
|
f5947518b1 | ||
|
1a4afa406b | ||
|
412b879f39 | ||
|
a830346545 | ||
|
fbb8320a25 | ||
|
14f4a0f701 | ||
|
e82e2817d5 | ||
|
ffbd1f9aeb | ||
|
5ab418a3cf | ||
|
a58e00b206 | ||
|
a74682bbf6 | ||
|
11142253fb | ||
|
ef7782167f | ||
|
f4a2682e6c | ||
|
35cee22cf6 | ||
|
fbb55228f2 | ||
|
391ac00d81 | ||
|
277b744ca3 | ||
|
a89c67a50e | ||
|
26b30a7b22 | ||
|
4dae23d3bb | ||
|
07e6c3f977 | ||
|
dace63d9d2 | ||
|
2ebf956599 | ||
|
b20793c67a | ||
|
47e9f59cc8 | ||
|
e27cac7fef | ||
|
5bfda6e2bc | ||
|
ef2ab7df48 | ||
|
ac1d9147d2 | ||
|
f2350909d2 | ||
|
f14ef93808 | ||
|
45bd4252bf | ||
|
6b4456bf0e | ||
|
c5e114dc7f | ||
|
30ceea4287 | ||
|
380f9ff013 | ||
|
19ac119714 | ||
|
236f70183c | ||
|
117bc35653 | ||
|
4fcaa8d3d6 | ||
|
536889d3df | ||
|
bbd13c04cc | ||
|
82e4ccb227 | ||
|
626cfa474f | ||
|
18e9a9afd3 | ||
|
41b7d5a3a0 | ||
|
16fcd4c639 | ||
|
ef3641efa6 | ||
|
8d95a32672 | ||
|
3a3a718779 | ||
|
3d1955211a | ||
|
8d87715d6f | ||
|
3c535b8e99 | ||
|
2c6affecea | ||
|
c2d2169a9f | ||
|
1153c5ff17 | ||
|
6c532f5926 | ||
|
7083394bc9 | ||
|
7c58410868 | ||
|
00c3e3b713 | ||
|
9d4a72766d | ||
|
82e16b4438 | ||
|
56353f2d0a | ||
|
4d0eb94a6f | ||
|
e1a494ecbd | ||
|
6b1e34da63 | ||
|
ccf9597102 | ||
|
5a6f4b9e1c | ||
|
9b09b42f97 | ||
|
854345e16f | ||
|
e0ee865b87 | ||
|
dad0c01335 | ||
|
79ef5af19b | ||
|
b349959f93 | ||
|
2e7f9612af | ||
|
8ff2303b22 | ||
|
b681fdd6d6 | ||
|
b65b3b438c | ||
|
580d6bab36 | ||
|
90349ddd7d | ||
|
dcac421bc0 | ||
|
b4f643577f | ||
|
411e7168b3 | ||
|
fef072a62a | ||
|
f529e9cb23 | ||
|
cfa3bfd88c | ||
|
321c99f12b | ||
|
73ad4992ee | ||
|
ddbf37c1be | ||
|
b9392371c7 | ||
|
d3c26a1548 | ||
|
31c2a61cce | ||
|
f84ba9768b | ||
|
1faa935527 | ||
|
5f940ff309 | ||
|
4c4c0ea0ba | ||
|
787b40a99e | ||
|
fd6a0ae747 | ||
|
298a32c096 | ||
|
aecff5ffd6 | ||
|
c1a6b7b787 | ||
|
0903f19f9c | ||
|
51aa23817a | ||
|
8f3f385cb6 | ||
|
915274081d | ||
|
a388c52b3f | ||
|
b4d3cd4d4d | ||
|
50c03b0675 | ||
|
fa3bb96417 | ||
|
09bde57835 | ||
|
76ac2a8843 | ||
|
f6ec6962ab | ||
|
28ad6829cd | ||
|
7f4b002a87 | ||
|
7e073b6ff4 | ||
|
fa3781efe5 | ||
|
bec74d85cd | ||
|
abc3829c64 | ||
|
18f5d6eab9 | ||
|
00f3e30930 | ||
|
97cd21d004 | ||
|
09b4d44f23 | ||
|
3536bf56bd | ||
|
f8eaf5def0 | ||
|
6077f85e52 | ||
|
e2976a3859 | ||
|
1e25fde22e | ||
|
55d88b7dae | ||
|
de30f8917f | ||
|
52653fa005 | ||
|
4628358add | ||
|
117b08a73e | ||
|
700888a8e0 | ||
|
ef882927f3 | ||
|
af9327de14 | ||
|
2881d42bf5 | ||
|
dc3a49fe57 | ||
|
addabd6396 | ||
|
3341c641cc | ||
|
363413e1e6 | ||
|
b675d27a30 | ||
|
796023408a | ||
|
983a8512b2 | ||
|
6593102632 | ||
|
65fff13150 | ||
|
edd1a140d7 | ||
|
18070baad4 | ||
|
acf729f6e7 | ||
|
6dbc8eac03 | ||
|
35944bcbdc | ||
|
3f919f91c1 | ||
|
443a20d83d | ||
|
2fca26eaae | ||
|
ebc8141971 | ||
|
5d6bcc9b9b | ||
|
55fca332ba | ||
|
6b65c5fe69 | ||
|
3e4dbe1015 | ||
|
20197e64b2 | ||
|
94eecaf448 | ||
|
fa91e0e79b | ||
|
891adc38fc | ||
|
af6cc63db2 | ||
|
af73e14b64 | ||
|
9305fe7875 | ||
|
613fde4639 | ||
|
61db2c898b | ||
|
acf313c420 | ||
|
15fca08641 | ||
|
e2cbe3c1f8 | ||
|
d3883ea3ae | ||
|
8b2c4b3e60 | ||
|
65d0d00591 | ||
|
97a57c2f6e | ||
|
6559b2ff27 | ||
|
4c1deeb899 | ||
|
a65ea6805d | ||
|
effe65b034 | ||
|
37296cf048 | ||
|
1b597c1301 | ||
|
c2454d0689 | ||
|
9b60b44554 | ||
|
75516bdafb | ||
|
12f5a487c1 | ||
|
8d128d5035 | ||
|
cfa7117e64 | ||
|
7880bf0dc1 | ||
|
0054041829 | ||
|
99931f785a | ||
|
5e121269f0 | ||
|
38ced0243f | ||
|
869db878e1 | ||
|
e6c6e355e1 | ||
|
6221b9a5fd | ||
|
5f2c9c935b | ||
|
76559b253c | ||
|
3c54655c39 | ||
|
7a851ac199 | ||
|
b2ba5dfcd1 | ||
|
4a4fae8f8c | ||
|
de894ce7b2 | ||
|
09c4a955c9 | ||
|
db1581a0a2 | ||
|
db9d7b3060 | ||
|
7e0c29472e | ||
|
d13492070d | ||
|
695ede51ea | ||
|
168f382aa6 | ||
|
5bd433318d | ||
|
d1cd2a793e | ||
|
5a4464f338 | ||
|
561d25320b | ||
|
b225c2dd3b | ||
|
2a2e357513 | ||
|
28bfe1ecb8 | ||
|
cc12f0af7e | ||
|
da831a1b08 | ||
|
eb97be17dd | ||
|
2dd1c9b2ad | ||
|
41191db863 | ||
|
ee20204b22 | ||
|
f1032804bb | ||
|
ba1540d75b | ||
|
f5c87f80e1 | ||
|
d2d7603ff5 | ||
|
56013dca48 | ||
|
d33ed4a69f | ||
|
ed753b5564 | ||
|
7e65552d01 | ||
|
f77dc523e6 | ||
|
0d84bf3592 | ||
|
94dff49e60 | ||
|
5d4cf7e421 | ||
|
0e3e16e809 | ||
|
183fc8defb | ||
|
8712e89322 | ||
|
e2b49f9b65 | ||
|
7e11f3a3d6 | ||
|
71bebb6ec7 | ||
|
842c6ff4c6 | ||
|
7754f6da1d | ||
|
60e0752f1a | ||
|
ede1a20c53 | ||
|
04289fd50f | ||
|
ba3efa9e7c | ||
|
c5a66a6ed0 | ||
|
8a23b1554a | ||
|
d73f226cbd | ||
|
fd9ba678ec | ||
|
d29ba62ff9 | ||
|
00c97fd18f | ||
|
9531c3fa74 | ||
|
94293122e8 | ||
|
7aaa66c8ba | ||
|
0030bf725e | ||
|
22b6062900 | ||
|
005968ab70 | ||
|
dc6c194701 | ||
|
9b8772b590 | ||
|
ae8ba9f55d | ||
|
f4a7ce2c09 | ||
|
c84723668f | ||
|
bd3ed4207a | ||
|
1e8c2cfc9f | ||
|
5ce0238ace | ||
|
4e6b52b85c | ||
|
05fe7bb715 | ||
|
c555e2fac6 | ||
|
fd126ae154 | ||
|
6c7b6a9575 | ||
|
c4716e3e17 | ||
|
3601a33f20 | ||
|
451023518b | ||
|
2bd377a221 | ||
|
66384adad4 | ||
|
ec1f7ba5bc | ||
|
e7fc5b7b7e | ||
|
11477ea9d7 | ||
|
6adf40f45d | ||
|
1bdf169980 | ||
|
81cb356503 | ||
|
805778794c | ||
|
28cd8dd08a | ||
|
139b39984e | ||
|
f9b5fece80 | ||
|
8076c6bc0a | ||
|
44b89d13f8 | ||
|
fbc4225110 | ||
|
f07f35ccc1 | ||
|
111dfbf164 | ||
|
c713c7723b | ||
|
4fa2af41b0 | ||
|
39c09d22d1 | ||
|
4819b21f52 | ||
|
6ef6721527 | ||
|
14cb447874 | ||
|
1b2b89074d | ||
|
75c5678782 | ||
|
45ec5cdfb4 | ||
|
f6dd98825b | ||
|
f59271bd47 | ||
|
79f833b946 | ||
|
9ad562bbfd | ||
|
267b49247d | ||
|
dbda4150fb | ||
|
a4e17f0c49 | ||
|
8d8d1169d1 | ||
|
7bc9e61985 | ||
|
35cc6011b5 | ||
|
086af8fd22 | ||
|
a60d1520e6 | ||
|
30c22ba25a | ||
|
41fbaec42c | ||
|
562ec79e3b | ||
|
f620bd8eb2 | ||
|
13e40458d7 | ||
|
dc4ac6d8d7 | ||
|
41498bdf21 | ||
|
b8eae2eb82 | ||
|
039c2b5509 | ||
|
2e635370bb | ||
|
807a86371d | ||
|
c66953779c | ||
|
117ef18f1c | ||
|
520dd03d77 | ||
|
63f3ca2f6f | ||
|
2e8230e9f4 | ||
|
4bfea99e54 | ||
|
f58eba7975 | ||
|
53d1de4aec | ||
|
00f18c1bd8 | ||
|
ba4fbb2ec3 | ||
|
b3722bd637 | ||
|
012bd6d4fb | ||
|
9c4ca28d61 | ||
|
53bcae04ff | ||
|
754c54e268 | ||
|
f97fbc814e | ||
|
b8856a0577 | ||
|
1c0e88907b | ||
|
31b6df5b39 | ||
|
bca9e4fd08 | ||
|
026ceb5028 | ||
|
47d5a647b7 | ||
|
37d7230949 | ||
|
be458b1d5e | ||
|
f375a4a723 | ||
|
3edce9a630 | ||
|
c525bda1e0 | ||
|
417f586e0d | ||
|
80d7e68835 | ||
|
a284e6df5c | ||
|
7176a69f81 | ||
|
e3a1c02e8a | ||
|
5e789ae4e0 | ||
|
bb684e20cb | ||
|
e11293e46b | ||
|
e0d74a1657 | ||
|
fdd36565b1 | ||
|
28c53fe0d7 | ||
|
26539bf2b1 | ||
|
347889c822 | ||
|
91849b762c | ||
|
5d1319ddb9 | ||
|
d98228926e | ||
|
493997d998 | ||
|
3098b7c153 | ||
|
2b0a050226 | ||
|
1f3abc2bb9 | ||
|
dd5541e658 | ||
|
a76bf27f60 | ||
|
d70ce366cc | ||
|
f94b802c9b | ||
|
17d7bd4e31 | ||
|
76a40b60ff | ||
|
469efedab2 | ||
|
383699a8b4 | ||
|
1b9a07b923 | ||
|
15b76c266c | ||
|
dfdecaddb1 | ||
|
5de9de903d | ||
|
327f3fa441 | ||
|
08fde7580c | ||
|
4ca91ecc7e | ||
|
885db90bc0 | ||
|
c43d631eb5 | ||
|
cfda433d14 | ||
|
ea4a27bf89 | ||
|
23944833f2 | ||
|
4a40782be0 | ||
|
babafcaa87 | ||
|
9b164a6f5a | ||
|
4a07981972 | ||
|
6bb2c46f8a | ||
|
2054655912 | ||
|
062af45367 | ||
|
83c3ed5966 | ||
|
a2f2b818a7 | ||
|
e7941efd9a | ||
|
aa6faba9ae | ||
|
8ca72f3c64 | ||
|
45e10e7139 | ||
|
73d1b19669 | ||
|
ad4cf86a96 | ||
|
48b3e3aaf3 |
@@ -4,7 +4,7 @@
|
||||
"features": {
|
||||
"ghcr.io/devcontainers-contrib/features/poetry:2": {}
|
||||
},
|
||||
"postCreateCommand": "poetry config virtualenvs.in-project true && poetry install -E all && poetry run pre-commit install && yarn install",
|
||||
"postCreateCommand": "./scripts/setup-envs.sh",
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"settings": {
|
||||
|
6
.eslintignore
Normal file
6
.eslintignore
Normal file
@@ -0,0 +1,6 @@
|
||||
dist
|
||||
node_modules
|
||||
.yarn
|
||||
.history
|
||||
build
|
||||
lib
|
85
.eslintrc.js
Normal file
85
.eslintrc.js
Normal file
@@ -0,0 +1,85 @@
|
||||
module.exports = {
|
||||
root: true,
|
||||
env: {
|
||||
browser: true,
|
||||
commonjs: true,
|
||||
node: true,
|
||||
},
|
||||
parser: "@typescript-eslint/parser",
|
||||
parserOptions: {
|
||||
tsconfigRootDir: __dirname,
|
||||
project: ["./tsconfig.json", "./website/tsconfig.json"],
|
||||
},
|
||||
globals: {
|
||||
JSX: true,
|
||||
},
|
||||
extends: [
|
||||
"eslint:recommended",
|
||||
"plugin:react/recommended",
|
||||
"plugin:react-hooks/recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:import/recommended",
|
||||
"plugin:regexp/recommended",
|
||||
"plugin:prettier/recommended",
|
||||
],
|
||||
settings: {
|
||||
"import/resolver": {
|
||||
node: {
|
||||
extensions: [".js", ".jsx", ".ts", ".tsx"],
|
||||
},
|
||||
typescript: true,
|
||||
},
|
||||
react: {
|
||||
version: "detect",
|
||||
},
|
||||
},
|
||||
overrides: [
|
||||
{
|
||||
files: ["*.ts", "*.tsx"],
|
||||
rules: {
|
||||
"import/no-unresolved": "off",
|
||||
},
|
||||
},
|
||||
{
|
||||
files: ["*.js", "*.cjs"],
|
||||
rules: {
|
||||
"@typescript-eslint/no-var-requires": "off",
|
||||
},
|
||||
},
|
||||
],
|
||||
plugins: ["@typescript-eslint"],
|
||||
rules: {
|
||||
"linebreak-style": ["error", "unix"],
|
||||
quotes: ["error", "double", { avoidEscape: true }],
|
||||
semi: ["error", "always"],
|
||||
"@typescript-eslint/no-non-null-assertion": "off",
|
||||
"import/order": [
|
||||
"error",
|
||||
{
|
||||
groups: [
|
||||
"builtin",
|
||||
"external",
|
||||
"internal",
|
||||
"parent",
|
||||
"sibling",
|
||||
"index",
|
||||
],
|
||||
pathGroups: [
|
||||
{ pattern: "react", group: "builtin", position: "before" },
|
||||
{ pattern: "fs-extra", group: "builtin" },
|
||||
{ pattern: "lodash", group: "external", position: "before" },
|
||||
{ pattern: "clsx", group: "external", position: "before" },
|
||||
{ pattern: "@theme/**", group: "internal" },
|
||||
{ pattern: "@site/**", group: "internal" },
|
||||
{ pattern: "@theme-init/**", group: "internal" },
|
||||
{ pattern: "@theme-original/**", group: "internal" },
|
||||
],
|
||||
pathGroupsExcludedImportTypes: [],
|
||||
"newlines-between": "always",
|
||||
alphabetize: {
|
||||
order: "asc",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
1
.gitattributes
vendored
Normal file
1
.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
||||
website/versioned_*/** linguist-documentation
|
16
.github/actions/setup-node/action.yml
vendored
16
.github/actions/setup-node/action.yml
vendored
@@ -4,18 +4,10 @@ description: Setup Node
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- uses: actions/setup-node@v2
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "16"
|
||||
node-version: "18"
|
||||
cache: "yarn"
|
||||
|
||||
- id: yarn-cache-dir-path
|
||||
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
|
||||
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
|
||||
|
||||
- run: yarn install
|
||||
- run: yarn install --frozen-lockfile
|
||||
shell: bash
|
||||
|
21
.github/actions/setup-python/action.yml
vendored
21
.github/actions/setup-python/action.yml
vendored
@@ -6,6 +6,14 @@ inputs:
|
||||
description: Python version
|
||||
required: false
|
||||
default: "3.10"
|
||||
env-dir:
|
||||
description: Environment directory
|
||||
required: false
|
||||
default: "."
|
||||
no-root:
|
||||
description: Do not install package in the environment
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
@@ -14,11 +22,20 @@ runs:
|
||||
run: pipx install poetry
|
||||
shell: bash
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
architecture: "x64"
|
||||
cache: "poetry"
|
||||
cache-dependency-path: |
|
||||
./poetry.lock
|
||||
${{ inputs.env-dir }}/poetry.lock
|
||||
|
||||
- run: poetry install -E all
|
||||
- run: |
|
||||
cd ${{ inputs.env-dir }}
|
||||
if [ "${{ inputs.no-root }}" = "true" ]; then
|
||||
poetry install --all-extras --no-root
|
||||
else
|
||||
poetry install --all-extras
|
||||
fi
|
||||
shell: bash
|
||||
|
31
.github/dependabot.yml
vendored
31
.github/dependabot.yml
vendored
@@ -4,3 +4,34 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
groups:
|
||||
actions:
|
||||
patterns:
|
||||
- "*"
|
||||
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/.github/actions/build-api-doc"
|
||||
schedule:
|
||||
interval: daily
|
||||
groups:
|
||||
actions:
|
||||
patterns:
|
||||
- "*"
|
||||
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/.github/actions/setup-node"
|
||||
schedule:
|
||||
interval: daily
|
||||
groups:
|
||||
actions:
|
||||
patterns:
|
||||
- "*"
|
||||
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/.github/actions/setup-python"
|
||||
schedule:
|
||||
interval: daily
|
||||
groups:
|
||||
actions:
|
||||
patterns:
|
||||
- "*"
|
||||
|
25
.github/workflows/codecov.yml
vendored
25
.github/workflows/codecov.yml
vendored
@@ -6,42 +6,53 @@ on:
|
||||
- master
|
||||
pull_request:
|
||||
paths:
|
||||
- "envs/**"
|
||||
- "nonebot/**"
|
||||
- "packages/**"
|
||||
- "tests/**"
|
||||
- ".github/actions/setup-python/**"
|
||||
- ".github/workflows/codecov.yml"
|
||||
- "pyproject.toml"
|
||||
- "poetry.lock"
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Test Coverage
|
||||
runs-on: ${{ matrix.os }}
|
||||
concurrency:
|
||||
group: test-coverage-${{ github.ref }}-${{ matrix.os }}-${{ matrix.python-version }}
|
||||
group: test-coverage-${{ github.ref }}-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.env }}
|
||||
cancel-in-progress: true
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11"]
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
env: [pydantic-v1, pydantic-v2]
|
||||
fail-fast: false
|
||||
env:
|
||||
OS: ${{ matrix.os }}
|
||||
PYTHON_VERSION: ${{ matrix.python-version }}
|
||||
PYDANTIC_VERSION: ${{ matrix.env }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Python environment
|
||||
uses: ./.github/actions/setup-python
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
env-dir: ./envs/${{ matrix.env }}
|
||||
no-root: true
|
||||
|
||||
- name: Run Pytest
|
||||
run: |
|
||||
cd tests/
|
||||
poetry run pytest -n auto --cov-report xml
|
||||
cd ./envs/${{ matrix.env }}
|
||||
poetry run bash "../../scripts/run-tests.sh"
|
||||
|
||||
- name: Upload coverage report
|
||||
uses: codecov/codecov-action@v3
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
env_vars: OS,PYTHON_VERSION
|
||||
env_vars: OS,PYTHON_VERSION,PYDANTIC_VERSION
|
||||
files: ./tests/coverage.xml
|
||||
flags: unittests
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
16
.github/workflows/noneflow.yml
vendored
16
.github/workflows/noneflow.yml
vendored
@@ -47,9 +47,9 @@ jobs:
|
||||
run: pipx install poetry
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.10"
|
||||
python-version: "3.x"
|
||||
|
||||
- name: Test Plugin
|
||||
id: plugin-test
|
||||
@@ -62,18 +62,18 @@ jobs:
|
||||
steps:
|
||||
- name: Generate token
|
||||
id: generate-token
|
||||
uses: tibdex/github-app-token@v1
|
||||
uses: tibdex/github-app-token@v2
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_KEY }}
|
||||
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Cache pre-commit hooks
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: .cache/.pre-commit
|
||||
key: noneflow-${{ runner.os }}-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
@@ -84,9 +84,9 @@ jobs:
|
||||
config: >
|
||||
{
|
||||
"base": "master",
|
||||
"plugin_path": "website/static/plugins.json",
|
||||
"bot_path": "website/static/bots.json",
|
||||
"adapter_path": "website/static/adapters.json"
|
||||
"plugin_path": "assets/plugins.json",
|
||||
"bot_path": "assets/bots.json",
|
||||
"adapter_path": "assets/adapters.json"
|
||||
}
|
||||
env:
|
||||
PLUGIN_TEST_RESULT: ${{ needs.plugin_test.outputs.result }}
|
||||
|
27
.github/workflows/pyright.yml
vendored
27
.github/workflows/pyright.yml
vendored
@@ -6,21 +6,42 @@ on:
|
||||
- master
|
||||
pull_request:
|
||||
paths:
|
||||
- "envs/**"
|
||||
- "nonebot/**"
|
||||
- "packages/**"
|
||||
- "tests/**"
|
||||
- ".github/actions/setup-python/**"
|
||||
- ".github/workflows/pyright.yml"
|
||||
- "pyproject.toml"
|
||||
- "poetry.lock"
|
||||
|
||||
jobs:
|
||||
pyright:
|
||||
name: Pyright Lint
|
||||
runs-on: ubuntu-latest
|
||||
concurrency:
|
||||
group: pyright-${{ github.ref }}-${{ matrix.env }}
|
||||
cancel-in-progress: true
|
||||
strategy:
|
||||
matrix:
|
||||
env: [pydantic-v1, pydantic-v2]
|
||||
fail-fast: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Python environment
|
||||
uses: ./.github/actions/setup-python
|
||||
with:
|
||||
env-dir: ./envs/${{ matrix.env }}
|
||||
no-root: true
|
||||
|
||||
- run: echo "$(poetry env info --path)/bin" >> $GITHUB_PATH
|
||||
- run: |
|
||||
(cd ./envs/${{ matrix.env }} && echo "$(poetry env info --path)/bin" >> $GITHUB_PATH)
|
||||
if [ "${{ matrix.env }}" = "pydantic-v1" ]; then
|
||||
sed -i 's/PYDANTIC_V2 = true/PYDANTIC_V2 = false/g' ./pyproject.toml
|
||||
fi
|
||||
shell: bash
|
||||
|
||||
- name: Run Pyright
|
||||
uses: jakebailey/pyright-action@v1
|
||||
uses: jakebailey/pyright-action@v2
|
||||
|
70
.github/workflows/release-drafter.yml
vendored
70
.github/workflows/release-drafter.yml
vendored
@@ -20,22 +20,22 @@ jobs:
|
||||
steps:
|
||||
- name: Generate token
|
||||
id: generate-token
|
||||
uses: tibdex/github-app-token@v1
|
||||
uses: tibdex/github-app-token@v2
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_KEY }}
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Setup Node Environment
|
||||
uses: ./.github/actions/setup-node
|
||||
|
||||
- uses: release-drafter/release-drafter@v5
|
||||
- uses: release-drafter/release-drafter@v6
|
||||
id: release-drafter
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Update Changelog
|
||||
uses: docker://ghcr.io/nonebot/auto-changelog:master
|
||||
@@ -59,8 +59,18 @@ jobs:
|
||||
release:
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Generate token
|
||||
id: generate-token
|
||||
uses: tibdex/github-app-token@v2
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_KEY }}
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Python Environment
|
||||
uses: ./.github/actions/setup-python
|
||||
@@ -71,33 +81,53 @@ jobs:
|
||||
- name: Build API Doc
|
||||
uses: ./.github/actions/build-api-doc
|
||||
|
||||
- run: |
|
||||
echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
|
||||
- name: Get Version
|
||||
id: version
|
||||
run: |
|
||||
echo "VERSION=$(poetry version -s)" >> $GITHUB_OUTPUT
|
||||
echo "TAG_VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
|
||||
echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: release-drafter/release-drafter@v5
|
||||
- name: Check Version
|
||||
if: steps.version.outputs.VERSION != steps.version.outputs.TAG_VERSION
|
||||
run: exit 1
|
||||
|
||||
- uses: release-drafter/release-drafter@v6
|
||||
with:
|
||||
name: Release ${{ env.TAG_NAME }} 🌈
|
||||
tag: ${{ env.TAG_NAME }}
|
||||
name: Release ${{ steps.version.outputs.TAG_NAME }} 🌈
|
||||
tag: ${{ steps.version.outputs.TAG_NAME }}
|
||||
publish: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Build and Publish Package
|
||||
- name: Build Package
|
||||
run: |
|
||||
poetry build
|
||||
poetry publish -u ${{secrets.PYPI_USERNAME}} -p ${{secrets.PYPI_PASSWORD}}
|
||||
gh release upload --clobber ${{ env.TAG_NAME }} dist/*.tar.gz dist/*.whl
|
||||
|
||||
- name: Publish package to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
|
||||
- name: Publish package to GitHub
|
||||
run: |
|
||||
gh release upload --clobber ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Build and Publish Doc Package
|
||||
run: |
|
||||
yarn build:plugin --out-dir ../packages/nonebot-plugin-docs/nonebot_plugin_docs/dist
|
||||
export NONEBOT_VERSION=`poetry version -s`
|
||||
cd packages/nonebot-plugin-docs/
|
||||
poetry version $NONEBOT_VERSION
|
||||
poetry version ${{ steps.version.outputs.VERSION }}
|
||||
poetry build
|
||||
poetry publish -u ${{secrets.PYPI_USERNAME}} -p ${{secrets.PYPI_PASSWORD}}
|
||||
gh release upload --clobber ${{ env.TAG_NAME }} dist/*.tar.gz dist/*.whl
|
||||
|
||||
- name: Publish Doc Package to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
packages-dir: packages/nonebot-plugin-docs/dist/
|
||||
|
||||
- name: Publish Doc Package to GitHub
|
||||
run: |
|
||||
cd packages/nonebot-plugin-docs/
|
||||
gh release upload --clobber ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
|
4
.github/workflows/release.yml
vendored
4
.github/workflows/release.yml
vendored
@@ -9,12 +9,12 @@ jobs:
|
||||
steps:
|
||||
- name: Generate token
|
||||
id: generate-token
|
||||
uses: tibdex/github-app-token@v1
|
||||
uses: tibdex/github-app-token@v2
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_KEY }}
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
|
11
.github/workflows/ruff.yml
vendored
11
.github/workflows/ruff.yml
vendored
@@ -6,16 +6,25 @@ on:
|
||||
- master
|
||||
pull_request:
|
||||
paths:
|
||||
- "envs/**"
|
||||
- "nonebot/**"
|
||||
- "packages/**"
|
||||
- "tests/**"
|
||||
- ".github/actions/setup-python/**"
|
||||
- ".github/workflows/ruff.yml"
|
||||
- "pyproject.toml"
|
||||
- "poetry.lock"
|
||||
|
||||
jobs:
|
||||
ruff:
|
||||
name: Ruff Lint
|
||||
runs-on: ubuntu-latest
|
||||
concurrency:
|
||||
group: pyright-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Run Ruff Lint
|
||||
uses: chartboost/ruff-action@v1
|
||||
|
4
.github/workflows/website-deploy.yml
vendored
4
.github/workflows/website-deploy.yml
vendored
@@ -13,7 +13,9 @@ jobs:
|
||||
cancel-in-progress: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Python Environment
|
||||
uses: ./.github/actions/setup-python
|
||||
|
3
.github/workflows/website-preview.yml
vendored
3
.github/workflows/website-preview.yml
vendored
@@ -11,9 +11,10 @@ jobs:
|
||||
cancel-in-progress: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Python Environment
|
||||
uses: ./.github/actions/setup-python
|
||||
|
2
.gitignore
vendored
2
.gitignore
vendored
@@ -139,7 +139,7 @@ fabric.properties
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two \r
|
||||
Icon
|
||||
# Icon
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
@@ -7,26 +7,26 @@ ci:
|
||||
autoupdate_commit_msg: ":arrow_up: auto update by pre-commit hooks"
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.0.276
|
||||
rev: v0.2.0
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
stages: [commit]
|
||||
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.12.0
|
||||
rev: 5.13.2
|
||||
hooks:
|
||||
- id: isort
|
||||
stages: [commit]
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.3.0
|
||||
rev: 24.1.1
|
||||
hooks:
|
||||
- id: black
|
||||
stages: [commit]
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v3.0.0-alpha.9-for-vscode
|
||||
rev: v4.0.0-alpha.8
|
||||
hooks:
|
||||
- id: prettier
|
||||
types_or: [javascript, jsx, ts, tsx, markdown, yaml, json]
|
||||
|
14
.prettierrc
14
.prettierrc
@@ -5,5 +5,17 @@
|
||||
"arrowParens": "always",
|
||||
"singleQuote": false,
|
||||
"trailingComma": "es5",
|
||||
"semi": true
|
||||
"semi": true,
|
||||
"overrides": [
|
||||
{
|
||||
"files": [
|
||||
"**/devcontainer.json",
|
||||
"**/tsconfig.json",
|
||||
"**/tsconfig.*.json"
|
||||
],
|
||||
"options": {
|
||||
"parser": "json"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
31
.stylelintrc.js
Normal file
31
.stylelintrc.js
Normal file
@@ -0,0 +1,31 @@
|
||||
module.exports = {
|
||||
extends: ["stylelint-config-standard", "stylelint-prettier/recommended"],
|
||||
overrides: [
|
||||
{
|
||||
files: ["*.css"],
|
||||
rules: {
|
||||
"function-no-unknown": [true, { ignoreFunctions: ["theme"] }],
|
||||
"selector-class-pattern": [
|
||||
"^([a-z][a-z0-9]*)(-[a-z0-9]+)*$",
|
||||
{
|
||||
resolveNestedSelectors: true,
|
||||
message: (selector) =>
|
||||
`Expected class selector "${selector}" to be kebab-case`,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
files: ["*.module.css"],
|
||||
rules: {
|
||||
"selector-class-pattern": [
|
||||
"^[a-z][a-zA-Z0-9]+$",
|
||||
{
|
||||
message: (selector) =>
|
||||
`Expected class selector "${selector}" to be lowerCamelCase`,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
26
CITATION.cff
Normal file
26
CITATION.cff
Normal file
@@ -0,0 +1,26 @@
|
||||
# This CITATION.cff file was generated with cffinit.
|
||||
# Visit https://bit.ly/cffinit to generate yours today!
|
||||
|
||||
cff-version: 1.2.0
|
||||
title: NoneBot
|
||||
message: >-
|
||||
If you use this software, please cite it using the
|
||||
metadata from this file.
|
||||
type: software
|
||||
authors:
|
||||
- given-names: Yongyu
|
||||
family-names: Yan
|
||||
email: yyy@nonebot.dev
|
||||
- name: NoneBot Team
|
||||
email: contact@nonebot.dev
|
||||
website: 'https://github.com/nonebot'
|
||||
repository-code: 'https://github.com/nonebot/nonebot2'
|
||||
url: 'https://nonebot.dev/'
|
||||
abstract: >-
|
||||
NoneBot, an asynchronous multi-platform chatbot framework
|
||||
written in Python
|
||||
keywords:
|
||||
- nonebot
|
||||
- chatbot
|
||||
- pydantic
|
||||
license: MIT
|
@@ -10,12 +10,10 @@
|
||||
|
||||
### 报告问题、故障与漏洞
|
||||
|
||||
NoneBot2 仍然是一个不够稳定的开发中项目,如果你在使用过程中发现问题并确信是由 NoneBot2 引起的,欢迎提交 Issue。
|
||||
如果你在使用过程中发现问题并确信是由 NoneBot2 引起的,欢迎提交 Issue。
|
||||
|
||||
### 建议功能
|
||||
|
||||
NoneBot2 还未进入正式版,欢迎在 Issue 中提议要加入哪些新功能。
|
||||
|
||||
为了让开发者更好地理解你的意图,请认真描述你所需要的特性,可能的话可以提出你认为可行的解决方案。
|
||||
|
||||
## Pull Request
|
||||
|
77
README.md
77
README.md
@@ -54,6 +54,9 @@ _✨ 跨平台 Python 异步机器人框架 ✨_
|
||||
<a href="https://onebot.dev/">
|
||||
<img src="https://img.shields.io/badge/OneBot-v12-black?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABABAMAAABYR2ztAAAAIVBMVEUAAAAAAAADAwMHBwceHh4UFBQNDQ0ZGRkoKCgvLy8iIiLWSdWYAAAAAXRSTlMAQObYZgAAAQVJREFUSMftlM0RgjAQhV+0ATYK6i1Xb+iMd0qgBEqgBEuwBOxU2QDKsjvojQPvkJ/ZL5sXkgWrFirK4MibYUdE3OR2nEpuKz1/q8CdNxNQgthZCXYVLjyoDQftaKuniHHWRnPh2GCUetR2/9HsMAXyUT4/3UHwtQT2AggSCGKeSAsFnxBIOuAggdh3AKTL7pDuCyABcMb0aQP7aM4AnAbc/wHwA5D2wDHTTe56gIIOUA/4YYV2e1sg713PXdZJAuncdZMAGkAukU9OAn40O849+0ornPwT93rphWF0mgAbauUrEOthlX8Zu7P5A6kZyKCJy75hhw1Mgr9RAUvX7A3csGqZegEdniCx30c3agAAAABJRU5ErkJggg==" alt="onebot">
|
||||
</a>
|
||||
<a href="https://bot.q.qq.com/wiki/">
|
||||
<img src="https://img.shields.io/badge/QQ-Bot-lightgrey?style=social&logo=data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMTIuODIgMTMwLjg5Ij48ZyBkYXRhLW5hbWU9IuWbvuWxgiAyIj48ZyBkYXRhLW5hbWU9IuWbvuWxgiAxIj48cGF0aCBkPSJNNTUuNjMgMTMwLjhjLTcgMC0xMy45LjA4LTIwLjg2IDAtMTkuMTUtLjI1LTMxLjcxLTExLjQtMzQuMjItMzAuMy00LjA3LTMwLjY2IDE0LjkzLTU5LjIgNDQuODMtNjYuNjQgMi0uNTEgNS4yMS0uMzEgNS4yMS0xLjYzIDAtMi4xMy4xNC0yLjEzLjE0LTUuNTcgMC0uODktMS4zLTEuNDYtMi4yMi0yLjMxLTYuNzMtNi4yMy03LjY3LTEzLjQxLTEtMjAuMTggNS40LTUuNTIgMTEuODctNS40IDE3LjgtLjU5IDYuNDkgNS4yNiA2LjMxIDEzLjA4LS44NiAyMS0uNjguNzQtMS43OCAxLjYtMS43OCAyLjY3djQuMjFjMCAxLjM1IDIuMiAxLjYyIDQuNzkgMi4zNSAzMS4wOSA4LjY1IDQ4LjE3IDM0LjEzIDQ1IDY2LjM3LTEuNzYgMTguMTUtMTQuNTYgMzAuMjMtMzIuNyAzMC42My04LjAyLjE5LTE2LjA3LS4wMS0yNC4xMy0uMDF6IiBmaWxsPSIjMDI5OWZlIi8+PHBhdGggZD0iTTMxLjQ2IDExOC4zOGMtMTAuNS0uNjktMTYuOC02Ljg2LTE4LjM4LTE3LjI3LTMtMTkuNDIgMi43OC0zNS44NiAxOC40Ni00Ny44MyAxNC4xNi0xMC44IDI5Ljg3LTEyIDQ1LjM4LTMuMTkgMTcuMjUgOS44NCAyNC41OSAyNS44MSAyNCA0NS4yOS0uNDkgMTUuOS04LjQyIDIzLjE0LTI0LjM4IDIzLjUtNi41OS4xNC0xMy4xOSAwLTE5Ljc5IDAiIGZpbGw9IiNmZWZlZmUiLz48cGF0aCBkPSJNNDYuMDUgNzkuNThjLjA5IDUgLjIzIDkuODItNyA5Ljc3LTcuODItLjA2LTYuMS01LjY5LTYuMjQtMTAuMTktLjE1LTQuODItLjczLTEwIDYuNzMtOS44NHM2LjM3IDUuNTUgNi41MSAxMC4yNnoiIGZpbGw9IiMxMDlmZmUiLz48cGF0aCBkPSJNODAuMjcgNzkuMjdjLS41MyAzLjkxIDEuNzUgOS42NC01Ljg4IDEwLTcuNDcuMzctNi44MS00LjgyLTYuNjEtOS41LjItNC4zMi0xLjgzLTEwIDUuNzgtMTAuNDJzNi41OSA0Ljg5IDYuNzEgOS45MnoiIGZpbGw9IiMwODljZmUiLz48L2c+PC9nPjwvc3ZnPg==" alt="QQ">
|
||||
</a>
|
||||
<a href="https://core.telegram.org/bots/api">
|
||||
<img src="https://img.shields.io/badge/telegram-Bot-lightgrey?style=social&logo=telegram" alt="telegram">
|
||||
</a>
|
||||
@@ -63,9 +66,6 @@ _✨ 跨平台 Python 异步机器人框架 ✨_
|
||||
<a href="https://docs.github.com/en/developers/apps">
|
||||
<img src="https://img.shields.io/badge/GitHub-Bot-181717?style=social&logo=github" alt="github"/>
|
||||
</a>
|
||||
<a href="https://bot.q.qq.com/wiki/">
|
||||
<img src="https://img.shields.io/badge/QQ%E9%A2%91%E9%81%93-Bot-lightgrey?style=social&logo=data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMTIuODIgMTMwLjg5Ij48ZyBkYXRhLW5hbWU9IuWbvuWxgiAyIj48ZyBkYXRhLW5hbWU9IuWbvuWxgiAxIj48cGF0aCBkPSJNNTUuNjMgMTMwLjhjLTcgMC0xMy45LjA4LTIwLjg2IDAtMTkuMTUtLjI1LTMxLjcxLTExLjQtMzQuMjItMzAuMy00LjA3LTMwLjY2IDE0LjkzLTU5LjIgNDQuODMtNjYuNjQgMi0uNTEgNS4yMS0uMzEgNS4yMS0xLjYzIDAtMi4xMy4xNC0yLjEzLjE0LTUuNTcgMC0uODktMS4zLTEuNDYtMi4yMi0yLjMxLTYuNzMtNi4yMy03LjY3LTEzLjQxLTEtMjAuMTggNS40LTUuNTIgMTEuODctNS40IDE3LjgtLjU5IDYuNDkgNS4yNiA2LjMxIDEzLjA4LS44NiAyMS0uNjguNzQtMS43OCAxLjYtMS43OCAyLjY3djQuMjFjMCAxLjM1IDIuMiAxLjYyIDQuNzkgMi4zNSAzMS4wOSA4LjY1IDQ4LjE3IDM0LjEzIDQ1IDY2LjM3LTEuNzYgMTguMTUtMTQuNTYgMzAuMjMtMzIuNyAzMC42My04LjAyLjE5LTE2LjA3LS4wMS0yNC4xMy0uMDF6IiBmaWxsPSIjMDI5OWZlIi8+PHBhdGggZD0iTTMxLjQ2IDExOC4zOGMtMTAuNS0uNjktMTYuOC02Ljg2LTE4LjM4LTE3LjI3LTMtMTkuNDIgMi43OC0zNS44NiAxOC40Ni00Ny44MyAxNC4xNi0xMC44IDI5Ljg3LTEyIDQ1LjM4LTMuMTkgMTcuMjUgOS44NCAyNC41OSAyNS44MSAyNCA0NS4yOS0uNDkgMTUuOS04LjQyIDIzLjE0LTI0LjM4IDIzLjUtNi41OS4xNC0xMy4xOSAwLTE5Ljc5IDAiIGZpbGw9IiNmZWZlZmUiLz48cGF0aCBkPSJNNDYuMDUgNzkuNThjLjA5IDUgLjIzIDkuODItNyA5Ljc3LTcuODItLjA2LTYuMS01LjY5LTYuMjQtMTAuMTktLjE1LTQuODItLjczLTEwIDYuNzMtOS44NHM2LjM3IDUuNTUgNi41MSAxMC4yNnoiIGZpbGw9IiMxMDlmZmUiLz48cGF0aCBkPSJNODAuMjcgNzkuMjdjLS41MyAzLjkxIDEuNzUgOS42NC01Ljg4IDEwLTcuNDcuMzctNi44MS00LjgyLTYuNjEtOS41LjItNC4zMi0xLjgzLTEwIDUuNzgtMTAuNDJzNi41OSA0Ljg5IDYuNzEgOS45MnoiIGZpbGw9IiMwODljZmUiLz48L2c+PC9nPjwvc3ZnPg==" alt="QQ频道">
|
||||
</a>
|
||||
<!-- <a href="https://ding-doc.dingtalk.com/document#/org-dev-guide/elzz1p">
|
||||
<img src="https://img.shields.io/badge/%E9%92%89%E9%92%89-Bot-lightgrey?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAMAAACdt4HsAAAAnFBMVEUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD4jUzeAAAAM3RSTlMAQKSRaA+/f0YyFevh29R3cyklIfrlyrGsn41tVUs48c/HqJm9uZdhX1otGwkF9IN8V1CX0Q+IAAABY0lEQVRYw+3V2W7CMBAF0JuNQAhhX9OEfYdu9///rUVWpagE27Ef2gfO+0zGozsKnv6bMGzAhkNytIe5gDdzrwtTCwrbI8x4/NF668NAxgI3Q3UtFi3TyPwNQtPLUUmDd8YfqGLNe4v22XwEYb5zoOuF5baHq2UHtsKe5ivWfGAwrWu2mC34QM0PoCAuqZdOmiwV+5BLyMRtZ7dTSEcs48rzWfzwptMLyzpApka1SJ5FtR4kfCqNIBPEVDmqoqgwUYY5plQOlf6UEjNoOPnuKB6wzDyCrks///TDza8+PnR109WQdxLo8RKWq0PPnuXG0OXKQ6wWLFnCg75uYYbhmMIVVdQ709q33aHbGIj6Duz+2k1HQFX9VwqmY8xYsEJll2ahvhWgsjYLHFRXvIi2Qb0jzMQCzC3FAoydxCma88UCzE3JCWwkjCNYyMUCzHX4DiuTMawEwwhW6hnshPhjZzzJfAH0YacpbmRd7QAAAABJRU5ErkJggg==" alt="dingtalk"> -->
|
||||
</a>
|
||||
@@ -94,7 +94,7 @@ _✨ 跨平台 Python 异步机器人框架 ✨_
|
||||
|
||||
<p align="center">
|
||||
<a href="https://asciinema.org/a/569440">
|
||||
<img src="https://nonebot.dev/img/setup.svg">
|
||||
<img src="https://nonebot.dev/img/setup.svg" alt="setup" >
|
||||
</a>
|
||||
</p>
|
||||
|
||||
@@ -111,21 +111,25 @@ NoneBot2 是一个现代、跨平台、可扩展的 Python 聊天机器人框架
|
||||
- 海纳百川:一个框架,支持多个聊天软件平台,可自定义通信协议
|
||||
|
||||
| 协议名称 | 状态 | 注释 |
|
||||
| :-------------------------------------------------------------------------------------------------------------------: | :--: | :-----------------------------------------------------------------------: |
|
||||
| :--------------------------------------------------------------------------------------------------------------------------: | :--: | :-----------------------------------------------------------------------: |
|
||||
| OneBot([仓库](https://github.com/nonebot/adapter-onebot),[协议](https://onebot.dev/)) | ✅ | 支持 QQ、TG、微信公众号、KOOK 等[平台](https://onebot.dev/ecosystem.html) |
|
||||
| Telegram([仓库](https://github.com/nonebot/adapter-telegram),[协议](https://core.telegram.org/bots/api)) | ✅ | |
|
||||
| 飞书([仓库](https://github.com/nonebot/adapter-feishu),[协议](https://open.feishu.cn/document/home/index)) | ✅ | |
|
||||
| GitHub([仓库](https://github.com/nonebot/adapter-github),[协议](https://docs.github.com/en/apps)) | ✅ | GitHub APP & OAuth APP |
|
||||
| QQ 频道([仓库](https://github.com/nonebot/adapter-qqguild),[协议](https://bot.q.qq.com/wiki/)) | ✅ | 官方接口调整较多 |
|
||||
| QQ([仓库](https://github.com/nonebot/adapter-qq),[协议](https://bot.q.qq.com/wiki/)) | ✅ | QQ 官方接口调整较多 |
|
||||
| 钉钉([仓库](https://github.com/nonebot/adapter-ding),[协议](https://open.dingtalk.com/document/)) | 🤗 | 寻找 Maintainer(暂不可用) |
|
||||
| Console([仓库](https://github.com/nonebot/adapter-console)) | ✅ | 控制台交互 |
|
||||
| Red ([仓库](https://github.com/nonebot/adapter-red),[协议](https://chrononeko.github.io/QQNTRedProtocol/)) | ✅ | QQ 协议 |
|
||||
| Satori([仓库](https://github.com/nonebot/adapter-satori),[协议](https://satori.js.org/zh-CN)) | ✅ | 支持 Onebot、TG、飞书、微信公众号、Koishi 等 |
|
||||
| Discord ([仓库](https://github.com/nonebot/adapter-discord),[协议](https://discord.com/developers/docs/intro)) | ✅ | Discord Bot 协议 |
|
||||
| DoDo ([仓库](https://github.com/nonebot/adapter-dodo),[协议](https://open.imdodo.com/)) | ✅ | DoDo Bot 协议 |
|
||||
| 开黑啦([仓库](https://github.com/Tian-que/nonebot-adapter-kaiheila),[协议](https://developer.kookapp.cn/)) | ↗️ | 由社区贡献 |
|
||||
| Mirai([仓库](https://github.com/ieew/nonebot_adapter_mirai2),[协议](https://docs.mirai.mamoe.net/mirai-api-http/)) | ↗️ | QQ 协议,由社区贡献 |
|
||||
| Ntchat([仓库](https://github.com/JustUndertaker/adapter-ntchat)) | ↗️ | 微信协议,由社区贡献 |
|
||||
| MineCraft([仓库](https://github.com/17TheWord/nonebot-adapter-minecraft)) | ↗️ | 由社区贡献 |
|
||||
| BiliBili Live([仓库](https://github.com/wwweww/adapter-bilibili)) | ↗️ | 由社区贡献 |
|
||||
| Walle-Q([仓库](https://github.com/onebot-walle/nonebot_adapter_walleq)) | ↗️ | QQ 协议,由社区贡献 |
|
||||
| Villa([仓库](https://github.com/CMHopeSunshine/nonebot-adapter-villa)) | ↗️ | 米游社大别野 Bot 协议,由社区贡献 |
|
||||
| Villa([仓库](https://github.com/CMHopeSunshine/nonebot-adapter-villa),[协议](https://webstatic.mihoyo.com/vila/bot/doc/)) | ↗️ | 米游社大别野 Bot 协议,由社区贡献 |
|
||||
|
||||
- 坚实后盾:支持多种 web 框架,可自定义替换、组合
|
||||
|
||||
@@ -204,9 +208,8 @@ NoneBot2 不是 NoneBot1 的替代品。事实上,它们都在被积极的维
|
||||
或者尝试以下镜像:
|
||||
|
||||
- [文档镜像(中国境内)](https://nb2.baka.icu)
|
||||
- [文档镜像(Vercel)](https://nonebot2-vercel-mirror.vercel.app)
|
||||
|
||||
- 其他插件请查看 [商店](https://nonebot.dev/store)
|
||||
- 其他插件请查看 [商店](https://nonebot.dev/store/plugins)
|
||||
|
||||
## 许可证
|
||||
|
||||
@@ -225,10 +228,62 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
请参考 [贡献指南](./CONTRIBUTING.md)
|
||||
|
||||
### 鸣谢
|
||||
## 鸣谢
|
||||
|
||||
### 赞助者
|
||||
|
||||
感谢以下产品对 NoneBot 项目提供的赞助:
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/github-dark.png">
|
||||
<img src="https://assets.nonebot.dev/github-light.png" height="50" alt="GitHub">
|
||||
</picture>
|
||||
</a>
|
||||
<a href="https://www.netlify.com/">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/netlify-dark.svg">
|
||||
<img src="https://assets.nonebot.dev/netlify-light.svg" height="50" alt="netlify">
|
||||
</picture>
|
||||
</a>
|
||||
<a href="https://sentry.io/">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/sentry-dark.svg">
|
||||
<img src="https://assets.nonebot.dev/sentry-light.svg" height="50" alt="sentry">
|
||||
</picture>
|
||||
</a>
|
||||
</p>
|
||||
<p align="center">
|
||||
<a href="https://www.docker.com/">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/docker-dark.svg">
|
||||
<img src="https://assets.nonebot.dev/docker-light.svg" height="50" alt="docker">
|
||||
</picture>
|
||||
</a>
|
||||
<a href="https://www.algolia.com/">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/algolia-dark.svg">
|
||||
<img src="https://assets.nonebot.dev/algolia-light.svg" height="50" alt="algolia">
|
||||
</picture>
|
||||
</a>
|
||||
</p>
|
||||
<p align="center">
|
||||
<a href="https://www.jetbrains.com/">
|
||||
<img src="https://resources.jetbrains.com/storage/products/company/brand/logos/jb_beam.svg" height="80" alt="JetBrains" >
|
||||
</a>
|
||||
</p>
|
||||
|
||||
感谢以下赞助者对 NoneBot 项目提供的资金支持:
|
||||
|
||||
<a href="https://assets.nonebot.dev/sponsors.svg">
|
||||
<img src="https://assets.nonebot.dev/sponsors.svg" alt="sponsors" />
|
||||
</a>
|
||||
|
||||
### 开发者
|
||||
|
||||
感谢以下开发者对 NoneBot2 作出的贡献:
|
||||
|
||||
<a href="https://github.com/nonebot/nonebot2/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=nonebot/nonebot2&max=1000" />
|
||||
<img src="https://contrib.rocks/image?repo=nonebot/nonebot2&max=1000" alt="contributors" />
|
||||
</a>
|
||||
|
@@ -40,12 +40,12 @@
|
||||
"is_official": true
|
||||
},
|
||||
{
|
||||
"module_name": "nonebot.adapters.qqguild",
|
||||
"project_link": "nonebot-adapter-qqguild",
|
||||
"name": "QQ 频道",
|
||||
"desc": "QQ 频道官方机器人",
|
||||
"module_name": "nonebot.adapters.qq",
|
||||
"project_link": "nonebot-adapter-qq",
|
||||
"name": "QQ",
|
||||
"desc": "QQ 官方机器人",
|
||||
"author": "yanyongyu",
|
||||
"homepage": "https://github.com/nonebot/adapter-qqguild",
|
||||
"homepage": "https://github.com/nonebot/adapter-qq",
|
||||
"tags": [],
|
||||
"is_official": true
|
||||
},
|
||||
@@ -168,5 +168,50 @@
|
||||
}
|
||||
],
|
||||
"is_official": false
|
||||
},
|
||||
{
|
||||
"module_name": "nonebot.adapters.red",
|
||||
"project_link": "nonebot-adapter-red",
|
||||
"name": "RedProtocol",
|
||||
"desc": "QQNT RedProtocol 适配",
|
||||
"author": "zhaomaoniu",
|
||||
"homepage": "https://github.com/nonebot/adapter-red",
|
||||
"tags": [],
|
||||
"is_official": true
|
||||
},
|
||||
{
|
||||
"module_name": "nonebot.adapters.discord",
|
||||
"project_link": "nonebot-adapter-discord",
|
||||
"name": "Discord",
|
||||
"desc": "Discord 官方 Bot 协议适配",
|
||||
"author": "CMHopeSunshine",
|
||||
"homepage": "https://github.com/nonebot/adapter-discord",
|
||||
"tags": [],
|
||||
"is_official": true
|
||||
},
|
||||
{
|
||||
"module_name": "nonebot.adapters.satori",
|
||||
"project_link": "nonebot-adapter-satori",
|
||||
"name": "Satori",
|
||||
"desc": "Satori 协议适配器",
|
||||
"author": "RF-Tar-Railt",
|
||||
"homepage": "https://github.com/nonebot/adapter-satori",
|
||||
"tags": [
|
||||
{
|
||||
"label": "跨平台",
|
||||
"color": "#bf40bf"
|
||||
}
|
||||
],
|
||||
"is_official": true
|
||||
},
|
||||
{
|
||||
"module_name": "nonebot.adapters.dodo",
|
||||
"project_link": "nonebot-adapter-dodo",
|
||||
"name": "DoDo",
|
||||
"desc": "DoDo Bot 协议适配器",
|
||||
"author": "CMHopeSunshine",
|
||||
"homepage": "https://github.com/nonebot/adapter-dodo",
|
||||
"tags": [],
|
||||
"is_official": true
|
||||
}
|
||||
]
|
@@ -541,5 +541,71 @@
|
||||
"homepage": "https://github.com/LambdaYH/MigangBot",
|
||||
"tags": [],
|
||||
"is_official": false
|
||||
},
|
||||
{
|
||||
"name": "不正经的妹妹",
|
||||
"desc": "一款功能丰富、简单易用、自定义性强、扩展性强的可爱的QQ娱乐机器人",
|
||||
"author": "itsevin",
|
||||
"homepage": "https://github.com/itsevin/sister_bot",
|
||||
"tags": [],
|
||||
"is_official": false
|
||||
},
|
||||
{
|
||||
"name": "星见Kirami",
|
||||
"desc": "🌟 读作 Kirami,写作星见,简明轻快的聊天机器人应用。",
|
||||
"author": "A-kirami",
|
||||
"homepage": "https://kiramibot.dev/",
|
||||
"tags": [],
|
||||
"is_official": false
|
||||
},
|
||||
{
|
||||
"name": "OCNbot",
|
||||
"desc": "OI Contest Notifier bot,一个可以推送洛谷、cf、atcoder、牛客比赛通知的bot",
|
||||
"author": "ACnoway",
|
||||
"homepage": "https://github.com/ACnoway/OCNbot",
|
||||
"tags": [
|
||||
{
|
||||
"label": "OI",
|
||||
"color": "#2fccff"
|
||||
},
|
||||
{
|
||||
"label": "ACM",
|
||||
"color": "#ff0004"
|
||||
}
|
||||
],
|
||||
"is_official": false
|
||||
},
|
||||
{
|
||||
"name": "妃爱",
|
||||
"desc": "超可爱的妃爱QQ群聊机器人",
|
||||
"author": "jiangyuxiaoxiao",
|
||||
"homepage": "https://github.com/jiangyuxiaoxiao/Hiyori",
|
||||
"tags": [],
|
||||
"is_official": false
|
||||
},
|
||||
{
|
||||
"name": "芙芙",
|
||||
"desc": "供 Mooncell Wiki 协作使用的跨平台机器人",
|
||||
"author": "StarHeartHunt",
|
||||
"homepage": "https://github.com/MooncellWiki/BotFooChan",
|
||||
"tags": [],
|
||||
"is_official": false
|
||||
},
|
||||
{
|
||||
"name": "Sakiko",
|
||||
"desc": "基于 LiteLoaderBDS 的 Minecraft 基岩版 Bot",
|
||||
"author": "zhaomaoniu",
|
||||
"homepage": "https://github.com/zhaomaoniu/Sakiko",
|
||||
"tags": [
|
||||
{
|
||||
"label": "Minecraft",
|
||||
"color": "#6cc349"
|
||||
},
|
||||
{
|
||||
"label": "BanGDream",
|
||||
"color": "#e70050"
|
||||
}
|
||||
],
|
||||
"is_official": false
|
||||
}
|
||||
]
|
5556
assets/plugins.json
Normal file
5556
assets/plugins.json
Normal file
File diff suppressed because it is too large
Load Diff
2166
envs/pydantic-v1/poetry.lock
generated
Normal file
2166
envs/pydantic-v1/poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
18
envs/pydantic-v1/pyproject.toml
Normal file
18
envs/pydantic-v1/pyproject.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[tool.poetry]
|
||||
name = "nonebot-pydantic-v1"
|
||||
version = "0.1.0"
|
||||
description = "Private pydantic v1 test env for nonebot"
|
||||
authors = ["yanyongyu <yyy@nonebot.dev>"]
|
||||
license = "MIT"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.8"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pydantic = "^1.0.0"
|
||||
nonebot-test = { path = "../test/", develop = false }
|
||||
nonebot2 = { path = "../../", extras = ["all"], develop = true }
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
2238
envs/pydantic-v2/poetry.lock
generated
Normal file
2238
envs/pydantic-v2/poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
18
envs/pydantic-v2/pyproject.toml
Normal file
18
envs/pydantic-v2/pyproject.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[tool.poetry]
|
||||
name = "nonebot-pydantic-v2"
|
||||
version = "0.1.0"
|
||||
description = "Private pydantic v2 test env for nonebot"
|
||||
authors = ["yanyongyu <yyy@nonebot.dev>"]
|
||||
license = "MIT"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.8"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pydantic = "^2.0.0"
|
||||
nonebot-test = { path = "../test/", develop = false }
|
||||
nonebot2 = { path = "../../", extras = ["all"], develop = true }
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
1
envs/test/nonebot-test.py
Normal file
1
envs/test/nonebot-test.py
Normal file
@@ -0,0 +1 @@
|
||||
# fake file to make project installable
|
1029
envs/test/poetry.lock
generated
Normal file
1029
envs/test/poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
21
envs/test/pyproject.toml
Normal file
21
envs/test/pyproject.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[tool.poetry]
|
||||
name = "nonebot-test"
|
||||
version = "0.1.0"
|
||||
description = "Private test env for nonebot"
|
||||
authors = ["yanyongyu <yyy@nonebot.dev>"]
|
||||
license = "MIT"
|
||||
packages = [{ include = "nonebot-test.py" }]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.8"
|
||||
nonebug = "^0.3.0"
|
||||
wsproto = "^1.2.0"
|
||||
pytest-cov = "^4.0.0"
|
||||
pytest-xdist = "^3.0.2"
|
||||
pytest-asyncio = "^0.23.2"
|
||||
werkzeug = ">=2.3.6,<4.0.0"
|
||||
coverage-conditional-plugin = "^0.9.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
@@ -35,6 +35,7 @@
|
||||
{ref}``get_loaded_plugins` <nonebot.plugin.get_loaded_plugins>`
|
||||
- `get_available_plugin_names` =>
|
||||
{ref}``get_available_plugin_names` <nonebot.plugin.get_available_plugin_names>`
|
||||
- `get_plugin_config` => {ref}``get_plugin_config` <nonebot.plugin.get_plugin_config>`
|
||||
- `require` => {ref}``require` <nonebot.plugin.load.require>`
|
||||
|
||||
FrontMatter:
|
||||
@@ -47,13 +48,13 @@ from importlib.metadata import version
|
||||
from typing import Any, Dict, Type, Union, TypeVar, Optional, overload
|
||||
|
||||
import loguru
|
||||
from pydantic.env_settings import DotenvType
|
||||
|
||||
from nonebot.config import Env, Config
|
||||
from nonebot.compat import model_dump
|
||||
from nonebot.log import logger as logger
|
||||
from nonebot.adapters import Bot, Adapter
|
||||
from nonebot.config import DOTENV_TYPE, Env, Config
|
||||
from nonebot.utils import escape_tag, resolve_dot_notation
|
||||
from nonebot.drivers import Driver, ReverseDriver, combine_driver
|
||||
from nonebot.drivers import Driver, ASGIMixin, combine_driver
|
||||
|
||||
try:
|
||||
__version__ = version("nonebot2")
|
||||
@@ -149,13 +150,13 @@ def get_adapters() -> Dict[str, Adapter]:
|
||||
|
||||
|
||||
def get_app() -> Any:
|
||||
"""获取全局 {ref}`nonebot.drivers.ReverseDriver` 对应的 Server App 对象。
|
||||
"""获取全局 {ref}`nonebot.drivers.ASGIMixin` 对应的 Server App 对象。
|
||||
|
||||
返回:
|
||||
Server App 对象
|
||||
|
||||
异常:
|
||||
AssertionError: 全局 Driver 对象不是 {ref}`nonebot.drivers.ReverseDriver` 类型
|
||||
AssertionError: 全局 Driver 对象不是 {ref}`nonebot.drivers.ASGIMixin` 类型
|
||||
ValueError: 全局 {ref}`nonebot.drivers.Driver` 对象尚未初始化
|
||||
({ref}`nonebot.init <nonebot.init>` 尚未调用)
|
||||
|
||||
@@ -165,21 +166,19 @@ def get_app() -> Any:
|
||||
```
|
||||
"""
|
||||
driver = get_driver()
|
||||
assert isinstance(
|
||||
driver, ReverseDriver
|
||||
), "app object is only available for reverse driver"
|
||||
assert isinstance(driver, ASGIMixin), "app object is only available for asgi driver"
|
||||
return driver.server_app
|
||||
|
||||
|
||||
def get_asgi() -> Any:
|
||||
"""获取全局 {ref}`nonebot.drivers.ReverseDriver` 对应
|
||||
"""获取全局 {ref}`nonebot.drivers.ASGIMixin` 对应的
|
||||
[ASGI](https://asgi.readthedocs.io/) 对象。
|
||||
|
||||
返回:
|
||||
ASGI 对象
|
||||
|
||||
异常:
|
||||
AssertionError: 全局 Driver 对象不是 {ref}`nonebot.drivers.ReverseDriver` 类型
|
||||
AssertionError: 全局 Driver 对象不是 {ref}`nonebot.drivers.ASGIMixin` 类型
|
||||
ValueError: 全局 {ref}`nonebot.drivers.Driver` 对象尚未初始化
|
||||
({ref}`nonebot.init <nonebot.init>` 尚未调用)
|
||||
|
||||
@@ -190,8 +189,8 @@ def get_asgi() -> Any:
|
||||
"""
|
||||
driver = get_driver()
|
||||
assert isinstance(
|
||||
driver, ReverseDriver
|
||||
), "asgi object is only available for reverse driver"
|
||||
driver, ASGIMixin
|
||||
), "asgi object is only available for asgi driver"
|
||||
return driver.asgi
|
||||
|
||||
|
||||
@@ -275,7 +274,7 @@ def _log_patcher(record: "loguru.Record"):
|
||||
)
|
||||
|
||||
|
||||
def init(*, _env_file: Optional[DotenvType] = None, **kwargs: Any) -> None:
|
||||
def init(*, _env_file: Optional[DOTENV_TYPE] = None, **kwargs: Any) -> None:
|
||||
"""初始化 NoneBot 以及 全局 {ref}`nonebot.drivers.Driver` 对象。
|
||||
|
||||
NoneBot 将会从 .env 文件中读取环境信息,并使用相应的 env 文件配置。
|
||||
@@ -298,9 +297,11 @@ def init(*, _env_file: Optional[DotenvType] = None, **kwargs: Any) -> None:
|
||||
_env_file = _env_file or f".env.{env.environment}"
|
||||
config = Config(
|
||||
**kwargs,
|
||||
_env_file=(".env", _env_file)
|
||||
_env_file=(
|
||||
(".env", _env_file)
|
||||
if isinstance(_env_file, (str, os.PathLike))
|
||||
else _env_file,
|
||||
else _env_file
|
||||
),
|
||||
)
|
||||
|
||||
logger.configure(
|
||||
@@ -310,7 +311,7 @@ def init(*, _env_file: Optional[DotenvType] = None, **kwargs: Any) -> None:
|
||||
f"Current <y><b>Env: {escape_tag(env.environment)}</b></y>"
|
||||
)
|
||||
logger.opt(colors=True).debug(
|
||||
f"Loaded <y><b>Config</b></y>: {escape_tag(str(config.dict()))}"
|
||||
f"Loaded <y><b>Config</b></y>: {escape_tag(str(model_dump(config)))}"
|
||||
)
|
||||
|
||||
DriverClass = _resolve_combine_expr(config.driver)
|
||||
@@ -355,10 +356,9 @@ from nonebot.plugin import load_from_json as load_from_json
|
||||
from nonebot.plugin import load_from_toml as load_from_toml
|
||||
from nonebot.plugin import load_all_plugins as load_all_plugins
|
||||
from nonebot.plugin import on_shell_command as on_shell_command
|
||||
from nonebot.plugin import get_plugin_config as get_plugin_config
|
||||
from nonebot.plugin import get_loaded_plugins as get_loaded_plugins
|
||||
from nonebot.plugin import load_builtin_plugin as load_builtin_plugin
|
||||
from nonebot.plugin import load_builtin_plugins as load_builtin_plugins
|
||||
from nonebot.plugin import get_plugin_by_module_name as get_plugin_by_module_name
|
||||
from nonebot.plugin import get_available_plugin_names as get_available_plugin_names
|
||||
|
||||
__autodoc__ = {"internal": False}
|
||||
|
383
nonebot/compat.py
Normal file
383
nonebot/compat.py
Normal file
@@ -0,0 +1,383 @@
|
||||
"""本模块为 Pydantic 版本兼容层模块
|
||||
|
||||
为兼容 Pydantic V1 与 V2 版本,定义了一系列兼容函数与类供使用。
|
||||
|
||||
FrontMatter:
|
||||
sidebar_position: 16
|
||||
description: nonebot.compat 模块
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass, is_dataclass
|
||||
from typing_extensions import Self, Annotated, get_args, get_origin, is_typeddict
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Set,
|
||||
Dict,
|
||||
List,
|
||||
Type,
|
||||
Union,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Protocol,
|
||||
Generator,
|
||||
)
|
||||
|
||||
from pydantic import VERSION, BaseModel
|
||||
|
||||
from nonebot.typing import origin_is_annotated
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
PYDANTIC_V2 = int(VERSION.split(".", 1)[0]) == 2
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class _CustomValidationClass(Protocol):
|
||||
@classmethod
|
||||
def __get_validators__(cls) -> Generator[Callable[..., Any], None, None]: ...
|
||||
|
||||
CVC = TypeVar("CVC", bound=_CustomValidationClass)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"Required",
|
||||
"PydanticUndefined",
|
||||
"PydanticUndefinedType",
|
||||
"ConfigDict",
|
||||
"DEFAULT_CONFIG",
|
||||
"FieldInfo",
|
||||
"ModelField",
|
||||
"extract_field_info",
|
||||
"model_field_validate",
|
||||
"model_fields",
|
||||
"model_config",
|
||||
"model_dump",
|
||||
"type_validate_python",
|
||||
"type_validate_json",
|
||||
"custom_validation",
|
||||
)
|
||||
|
||||
__autodoc__ = {
|
||||
"PydanticUndefined": "Pydantic Undefined object",
|
||||
"PydanticUndefinedType": "Pydantic Undefined type",
|
||||
}
|
||||
|
||||
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
from pydantic_core import CoreSchema, core_schema
|
||||
from pydantic._internal._repr import display_as_type
|
||||
from pydantic import TypeAdapter, GetCoreSchemaHandler
|
||||
from pydantic.fields import FieldInfo as BaseFieldInfo
|
||||
|
||||
Required = Ellipsis
|
||||
"""Alias of Ellipsis for compatibility with pydantic v1"""
|
||||
|
||||
# Export undefined type
|
||||
from pydantic_core import PydanticUndefined as PydanticUndefined
|
||||
from pydantic_core import PydanticUndefinedType as PydanticUndefinedType
|
||||
|
||||
# isort: split
|
||||
|
||||
# Export model config dict
|
||||
from pydantic import ConfigDict as ConfigDict
|
||||
|
||||
DEFAULT_CONFIG = ConfigDict(extra="allow", arbitrary_types_allowed=True)
|
||||
"""Default config for validations"""
|
||||
|
||||
class FieldInfo(BaseFieldInfo):
|
||||
"""FieldInfo class with extra property for compatibility with pydantic v1"""
|
||||
|
||||
# make default can be positional argument
|
||||
def __init__(self, default: Any = PydanticUndefined, **kwargs: Any) -> None:
|
||||
super().__init__(default=default, **kwargs)
|
||||
|
||||
@property
|
||||
def extra(self) -> Dict[str, Any]:
|
||||
"""Extra data that is not part of the standard pydantic fields.
|
||||
|
||||
For compatibility with pydantic v1.
|
||||
"""
|
||||
# extract extra data from attributes set except used slots
|
||||
# we need to call super in advance due to
|
||||
# comprehension not inlined in cpython < 3.12
|
||||
# https://peps.python.org/pep-0709/
|
||||
slots = super().__slots__
|
||||
return {k: v for k, v in self._attributes_set.items() if k not in slots}
|
||||
|
||||
@dataclass
|
||||
class ModelField:
|
||||
"""ModelField class for compatibility with pydantic v1"""
|
||||
|
||||
name: str
|
||||
"""The name of the field."""
|
||||
annotation: Any
|
||||
"""The annotation of the field."""
|
||||
field_info: FieldInfo
|
||||
"""The FieldInfo of the field."""
|
||||
|
||||
@classmethod
|
||||
def _construct(cls, name: str, annotation: Any, field_info: FieldInfo) -> Self:
|
||||
return cls(name, annotation, field_info)
|
||||
|
||||
@classmethod
|
||||
def construct(
|
||||
cls, name: str, annotation: Any, field_info: Optional[FieldInfo] = None
|
||||
) -> Self:
|
||||
"""Construct a ModelField from given infos."""
|
||||
return cls._construct(name, annotation, field_info or FieldInfo())
|
||||
|
||||
def _annotation_has_config(self) -> bool:
|
||||
"""Check if the annotation has config.
|
||||
|
||||
TypeAdapter raise error when annotation has config
|
||||
and given config is not None.
|
||||
"""
|
||||
type_is_annotated = origin_is_annotated(get_origin(self.annotation))
|
||||
inner_type = (
|
||||
get_args(self.annotation)[0] if type_is_annotated else self.annotation
|
||||
)
|
||||
try:
|
||||
return (
|
||||
issubclass(inner_type, BaseModel)
|
||||
or is_dataclass(inner_type)
|
||||
or is_typeddict(inner_type)
|
||||
)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
def get_default(self) -> Any:
|
||||
"""Get the default value of the field."""
|
||||
return self.field_info.get_default(call_default_factory=True)
|
||||
|
||||
def _type_display(self):
|
||||
"""Get the display of the type of the field."""
|
||||
return display_as_type(self.annotation)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
# Each ModelField is unique for our purposes,
|
||||
# to allow store them in a set.
|
||||
return id(self)
|
||||
|
||||
    def extract_field_info(field_info: BaseFieldInfo) -> Dict[str, Any]:
        """Get FieldInfo init kwargs from a FieldInfo instance."""

        kwargs = field_info._attributes_set.copy()
        kwargs["annotation"] = field_info.rebuild_annotation()
        return kwargs

    def model_field_validate(
        model_field: ModelField, value: Any, config: Optional[ConfigDict] = None
    ) -> Any:
        """Validate the value passed to the field."""
        type: Any = Annotated[model_field.annotation, model_field.field_info]
        return TypeAdapter(
            type, config=None if model_field._annotation_has_config() else config
        ).validate_python(value)

    def model_fields(model: Type[BaseModel]) -> List[ModelField]:
        """Get the field list of a model."""

        return [
            ModelField._construct(
                name=name,
                annotation=field_info.rebuild_annotation(),
                field_info=FieldInfo(**extract_field_info(field_info)),
            )
            for name, field_info in model.model_fields.items()
        ]

    def model_config(model: Type[BaseModel]) -> Any:
        """Get the config of a model."""
        return model.model_config

    def model_dump(
        model: BaseModel,
        include: Optional[Set[str]] = None,
        exclude: Optional[Set[str]] = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
    ) -> Dict[str, Any]:
        return model.model_dump(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )

    def type_validate_python(type_: Type[T], data: Any) -> T:
        """Validate data with the given type."""
        return TypeAdapter(type_).validate_python(data)

    def type_validate_json(type_: Type[T], data: Union[str, bytes]) -> T:
        """Validate JSON with the given type."""
        return TypeAdapter(type_).validate_json(data)

    def __get_pydantic_core_schema__(
        cls: Type["_CustomValidationClass"],
        source_type: Any,
        handler: GetCoreSchemaHandler,
    ) -> CoreSchema:
        validators = list(cls.__get_validators__())
        if len(validators) == 1:
            return core_schema.no_info_plain_validator_function(validators[0])
        return core_schema.chain_schema(
            [core_schema.no_info_plain_validator_function(func) for func in validators]
        )

    def custom_validation(class_: Type["CVC"]) -> Type["CVC"]:
        """Use a pydantic v1 style validator generator in pydantic v2"""

        setattr(
            class_,
            "__get_pydantic_core_schema__",
            classmethod(__get_pydantic_core_schema__),
        )
        return class_

else:  # pragma: pydantic-v1
    from pydantic import Extra
    from pydantic import parse_obj_as, parse_raw_as
    from pydantic import BaseConfig as PydanticConfig
    from pydantic.fields import FieldInfo as BaseFieldInfo
    from pydantic.fields import ModelField as BaseModelField
    from pydantic.schema import get_annotation_from_field_info

    # isort: split

    from pydantic.fields import Required as Required

    # isort: split

    from pydantic.fields import Undefined as PydanticUndefined
    from pydantic.fields import UndefinedType as PydanticUndefinedType

    class ConfigDict(PydanticConfig):
        """Config class that allows getting a value with a default."""

        @classmethod
        def get(cls, field: str, default: Any = None) -> Any:
            """Get a config value."""
            return getattr(cls, field, default)

    class DEFAULT_CONFIG(ConfigDict):
        extra = Extra.allow
        arbitrary_types_allowed = True

    class FieldInfo(BaseFieldInfo):
        def __init__(self, default: Any = PydanticUndefined, **kwargs: Any):
            # preprocess the default value to make it compatible with pydantic v2:
            # when default is Required, set it to PydanticUndefined
            if default is Required:
                default = PydanticUndefined
            super().__init__(default, **kwargs)

    class ModelField(BaseModelField):
        @classmethod
        def _construct(cls, name: str, annotation: Any, field_info: FieldInfo) -> Self:
            return cls(
                name=name,
                type_=annotation,
                class_validators=None,
                model_config=DEFAULT_CONFIG,
                default=field_info.default,
                default_factory=field_info.default_factory,
                required=(
                    field_info.default is PydanticUndefined
                    and field_info.default_factory is None
                ),
                field_info=field_info,
            )

        @classmethod
        def construct(
            cls, name: str, annotation: Any, field_info: Optional[FieldInfo] = None
        ) -> Self:
            """Construct a ModelField from the given infos.

            The field annotation is preprocessed with field_info.
            """
            if field_info is not None:
                annotation = get_annotation_from_field_info(
                    annotation, field_info, name
                )
            return cls._construct(name, annotation, field_info or FieldInfo())

    def extract_field_info(field_info: BaseFieldInfo) -> Dict[str, Any]:
        """Get FieldInfo init kwargs from a FieldInfo instance."""

        kwargs = {
            s: getattr(field_info, s) for s in field_info.__slots__ if s != "extra"
        }
        kwargs.update(field_info.extra)
        return kwargs

    def model_field_validate(
        model_field: ModelField, value: Any, config: Optional[Type[ConfigDict]] = None
    ) -> Any:
        """Validate the value passed to the field.

        Set the config before validating to ensure correct validation.
        """

        if model_field.model_config is not config:
            model_field.set_config(config or ConfigDict)

        v, errs_ = model_field.validate(value, {}, loc=())
        if errs_:
            raise ValueError(value, model_field)
        return v

    def model_fields(model: Type[BaseModel]) -> List[ModelField]:
        """Get the field list of a model."""

        # construct the model field without preprocessing to avoid errors
        return [
            ModelField._construct(
                name=model_field.name,
                annotation=model_field.annotation,
                field_info=FieldInfo(
                    **extract_field_info(model_field.field_info),
                ),
            )
            for model_field in model.__fields__.values()
        ]

    def model_config(model: Type[BaseModel]) -> Any:
        """Get the config of a model."""
        return model.__config__

    def model_dump(
        model: BaseModel,
        include: Optional[Set[str]] = None,
        exclude: Optional[Set[str]] = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
    ) -> Dict[str, Any]:
        return model.dict(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )

    def type_validate_python(type_: Type[T], data: Any) -> T:
        """Validate data with the given type."""
        return parse_obj_as(type_, data)

    def type_validate_json(type_: Type[T], data: Union[str, bytes]) -> T:
        """Validate JSON with the given type."""
        return parse_raw_as(type_, data)

    def custom_validation(class_: Type["CVC"]) -> Type["CVC"]:
        """Do nothing in pydantic v1"""
        return class_
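Taken together, the two branches above give the rest of NoneBot a single, pydantic-version-agnostic surface. A minimal sketch of how these helpers are typically combined; the `UserConfig` model is a hypothetical example, not part of this diff:

```python
from typing import List
from pydantic import BaseModel
from nonebot.compat import model_dump, model_fields, type_validate_python

class UserConfig(BaseModel):  # hypothetical model, for illustration only
    name: str
    tags: List[str] = []

# works the same on pydantic v1 (parse_obj_as) and v2 (TypeAdapter)
config = type_validate_python(UserConfig, {"name": "alice", "tags": ["a"]})

# model_dump hides the .dict() vs .model_dump() split behind one call
assert model_dump(config)["name"] == "alice"

# model_fields returns compat ModelField objects on both major versions
print([field.name for field in model_fields(UserConfig)])  # ["name", "tags"]
```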
@@ -12,76 +12,256 @@ FrontMatter:
|
||||
"""
|
||||
|
||||
import os
|
||||
import abc
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import timedelta
|
||||
from ipaddress import IPv4Address
|
||||
from typing import TYPE_CHECKING, Any, Set, Dict, Tuple, Union, Mapping, Optional
|
||||
|
||||
from pydantic.utils import deep_update
|
||||
from pydantic.fields import Undefined, UndefinedType
|
||||
from pydantic import Extra, Field, BaseSettings, IPvAnyAddress
|
||||
from pydantic.env_settings import (
|
||||
DotenvType,
|
||||
SettingsError,
|
||||
EnvSettingsSource,
|
||||
InitSettingsSource,
|
||||
SettingsSourceCallable,
|
||||
from typing_extensions import TypeAlias, get_args, get_origin
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Set,
|
||||
Dict,
|
||||
List,
|
||||
Type,
|
||||
Tuple,
|
||||
Union,
|
||||
Mapping,
|
||||
Optional,
|
||||
)
|
||||
|
||||
from dotenv import dotenv_values
|
||||
from pydantic import Field, BaseModel
|
||||
from pydantic.networks import IPvAnyAddress
|
||||
|
||||
from nonebot.log import logger
|
||||
from nonebot.typing import origin_is_union
|
||||
from nonebot.utils import deep_update, type_is_complex, lenient_issubclass
|
||||
from nonebot.compat import (
|
||||
PYDANTIC_V2,
|
||||
ConfigDict,
|
||||
ModelField,
|
||||
PydanticUndefined,
|
||||
PydanticUndefinedType,
|
||||
model_config,
|
||||
model_fields,
|
||||
)
|
||||
|
||||
DOTENV_TYPE: TypeAlias = Union[
|
||||
Path, str, List[Union[Path, str]], Tuple[Union[Path, str], ...]
|
||||
]
|
||||
|
||||
ENV_FILE_SENTINEL = Path("")
|
||||
|
||||
|
||||
class CustomEnvSettings(EnvSettingsSource):
|
||||
def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
|
||||
class SettingsError(ValueError): ...
|
||||
|
||||
|
||||
class BaseSettingsSource(abc.ABC):
|
||||
def __init__(self, settings_cls: Type["BaseSettings"]) -> None:
|
||||
self.settings_cls = settings_cls
|
||||
|
||||
@property
|
||||
def config(self) -> "SettingsConfig":
|
||||
return model_config(self.settings_cls)
|
||||
|
||||
@abc.abstractmethod
|
||||
def __call__(self) -> Dict[str, Any]:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class InitSettingsSource(BaseSettingsSource):
|
||||
__slots__ = ("init_kwargs",)
|
||||
|
||||
def __init__(
|
||||
self, settings_cls: Type["BaseSettings"], init_kwargs: Dict[str, Any]
|
||||
) -> None:
|
||||
self.init_kwargs = init_kwargs
|
||||
super().__init__(settings_cls)
|
||||
|
||||
def __call__(self) -> Dict[str, Any]:
|
||||
return self.init_kwargs
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"InitSettingsSource(init_kwargs={self.init_kwargs!r})"
|
||||
|
||||
|
||||
class DotEnvSettingsSource(BaseSettingsSource):
|
||||
def __init__(
|
||||
self,
|
||||
settings_cls: Type["BaseSettings"],
|
||||
env_file: Optional[DOTENV_TYPE] = ENV_FILE_SENTINEL,
|
||||
env_file_encoding: Optional[str] = None,
|
||||
case_sensitive: Optional[bool] = None,
|
||||
env_nested_delimiter: Optional[str] = None,
|
||||
) -> None:
|
||||
super().__init__(settings_cls)
|
||||
self.env_file = (
|
||||
env_file
|
||||
if env_file is not ENV_FILE_SENTINEL
|
||||
else self.config.get("env_file", (".env",))
|
||||
)
|
||||
self.env_file_encoding = (
|
||||
env_file_encoding
|
||||
if env_file_encoding is not None
|
||||
else self.config.get("env_file_encoding", "utf-8")
|
||||
)
|
||||
self.case_sensitive = (
|
||||
case_sensitive
|
||||
if case_sensitive is not None
|
||||
else self.config.get("case_sensitive", False)
|
||||
)
|
||||
self.env_nested_delimiter = (
|
||||
env_nested_delimiter
|
||||
if env_nested_delimiter is not None
|
||||
else self.config.get("env_nested_delimiter", None)
|
||||
)
|
||||
|
||||
def _apply_case_sensitive(self, var_name: str) -> str:
|
||||
return var_name if self.case_sensitive else var_name.lower()
|
||||
|
||||
def _field_is_complex(self, field: ModelField) -> Tuple[bool, bool]:
|
||||
if type_is_complex(field.annotation):
|
||||
return True, False
|
||||
elif origin_is_union(get_origin(field.annotation)) and any(
|
||||
type_is_complex(arg) for arg in get_args(field.annotation)
|
||||
):
|
||||
return True, True
|
||||
return False, False
|
||||
|
||||
def _parse_env_vars(
|
||||
self, env_vars: Mapping[str, Optional[str]]
|
||||
) -> Dict[str, Optional[str]]:
|
||||
return {
|
||||
self._apply_case_sensitive(key): value for key, value in env_vars.items()
|
||||
}
|
||||
|
||||
def _read_env_file(self, file_path: Path) -> Dict[str, Optional[str]]:
|
||||
file_vars = dotenv_values(file_path, encoding=self.env_file_encoding)
|
||||
return self._parse_env_vars(file_vars)
|
||||
|
||||
def _read_env_files(self) -> Dict[str, Optional[str]]:
|
||||
env_files = self.env_file
|
||||
if env_files is None:
|
||||
return {}
|
||||
|
||||
if isinstance(env_files, (str, os.PathLike)):
|
||||
env_files = [env_files]
|
||||
|
||||
dotenv_vars: Dict[str, Optional[str]] = {}
|
||||
for env_file in env_files:
|
||||
env_path = Path(env_file).expanduser()
|
||||
if env_path.is_file():
|
||||
dotenv_vars.update(self._read_env_file(env_path))
|
||||
return dotenv_vars
|
||||
|
||||
def _next_field(
|
||||
self, field: Optional[ModelField], key: str
|
||||
) -> Optional[ModelField]:
|
||||
if not field or origin_is_union(get_origin(field.annotation)):
|
||||
return None
|
||||
elif field.annotation and lenient_issubclass(field.annotation, BaseModel):
|
||||
for field in model_fields(field.annotation):
|
||||
if field.name == key:
|
||||
return field
|
||||
return None
|
||||
|
||||
def _explode_env_vars(
|
||||
self,
|
||||
field: ModelField,
|
||||
env_vars: Dict[str, Optional[str]],
|
||||
env_file_vars: Dict[str, Optional[str]],
|
||||
) -> Dict[str, Any]:
|
||||
if self.env_nested_delimiter is None:
|
||||
return {}
|
||||
|
||||
prefix = f"{field.name}{self.env_nested_delimiter}"
|
||||
result: Dict[str, Any] = {}
|
||||
for env_name, env_val in env_vars.items():
|
||||
if not env_name.startswith(prefix):
|
||||
continue
|
||||
|
||||
# delete from file vars when used
|
||||
if env_name in env_file_vars:
|
||||
del env_file_vars[env_name]
|
||||
|
||||
_, *keys, last_key = env_name.split(self.env_nested_delimiter)
|
||||
env_var = result
|
||||
target_field: Optional[ModelField] = field
|
||||
for key in keys:
|
||||
target_field = self._next_field(target_field, key)
|
||||
env_var = env_var.setdefault(key, {})
|
||||
|
||||
target_field = self._next_field(target_field, last_key)
|
||||
if target_field and env_val:
|
||||
is_complex, allow_parse_failure = self._field_is_complex(target_field)
|
||||
if is_complex:
|
||||
try:
|
||||
env_val = json.loads(env_val)
|
||||
except ValueError as e:
|
||||
if not allow_parse_failure:
|
||||
raise SettingsError(
|
||||
f'error parsing env var "{env_name}"'
|
||||
) from e
|
||||
|
||||
env_var[last_key] = env_val
|
||||
|
||||
return result
|
||||
|
||||
def __call__(self) -> Dict[str, Any]:
|
||||
"""从环境变量和 dotenv 配置文件中读取配置项。"""
|
||||
|
||||
d: Dict[str, Any] = {}
|
||||
|
||||
if settings.__config__.case_sensitive:
|
||||
env_vars: Mapping[str, Optional[str]] = os.environ # pragma: no cover
|
||||
else:
|
||||
env_vars = {k.lower(): v for k, v in os.environ.items()}
|
||||
|
||||
env_file_vars = self._read_env_files(settings.__config__.case_sensitive)
|
||||
env_vars = self._parse_env_vars(os.environ)
|
||||
env_file_vars = self._read_env_files()
|
||||
env_vars = {**env_file_vars, **env_vars}
|
||||
|
||||
for field in settings.__fields__.values():
|
||||
env_val: Union[str, None, UndefinedType] = Undefined
|
||||
for env_name in field.field_info.extra["env_names"]:
|
||||
env_val = env_vars.get(env_name, Undefined)
|
||||
for field in model_fields(self.settings_cls):
|
||||
field_name = field.name
|
||||
env_name = self._apply_case_sensitive(field_name)
|
||||
|
||||
# try get values from env vars
|
||||
env_val = env_vars.get(env_name, PydanticUndefined)
|
||||
# delete from file vars when used
|
||||
if env_name in env_file_vars:
|
||||
del env_file_vars[env_name]
|
||||
if env_val is not Undefined:
|
||||
break
|
||||
|
||||
is_complex, allow_parse_failure = self.field_is_complex(field)
|
||||
is_complex, allow_parse_failure = self._field_is_complex(field)
|
||||
if is_complex:
|
||||
if isinstance(env_val, UndefinedType):
|
||||
if isinstance(env_val, PydanticUndefinedType):
|
||||
# field is complex but no value found so far, try explode_env_vars
|
||||
if env_val_built := self.explode_env_vars(field, env_vars):
|
||||
d[field.alias] = env_val_built
|
||||
if env_val_built := self._explode_env_vars(
|
||||
field, env_vars, env_file_vars
|
||||
):
|
||||
d[field_name] = env_val_built
|
||||
elif env_val is None:
|
||||
d[field.alias] = env_val
|
||||
d[field_name] = env_val
|
||||
else:
|
||||
# field is complex and there's a value
|
||||
# decode that as JSON, then add explode_env_vars
|
||||
try:
|
||||
env_val = settings.__config__.parse_env_var(field.name, env_val)
|
||||
env_val = json.loads(env_val)
|
||||
except ValueError as e:
|
||||
if not allow_parse_failure:
|
||||
raise SettingsError(
|
||||
f'error parsing env var "{env_name}"' # type: ignore
|
||||
f'error parsing env var "{env_name}"'
|
||||
) from e
|
||||
|
||||
if isinstance(env_val, dict):
|
||||
d[field.alias] = deep_update(
|
||||
env_val, self.explode_env_vars(field, env_vars)
|
||||
# field value is a dict
|
||||
# try explode_env_vars to find more sub-values
|
||||
d[field_name] = deep_update(
|
||||
env_val,
|
||||
self._explode_env_vars(field, env_vars, env_file_vars),
|
||||
)
|
||||
else:
|
||||
d[field.alias] = env_val
|
||||
elif not isinstance(env_val, UndefinedType):
|
||||
d[field_name] = env_val
|
||||
elif env_val is not PydanticUndefined:
|
||||
# simplest case, field is not complex
|
||||
# we only need to add the value if it was found
|
||||
d[field.alias] = env_val
|
||||
d[field_name] = env_val
|
||||
|
||||
# keep remaining user-defined config entries
|
||||
for env_name in env_file_vars:
|
||||
@@ -89,7 +269,7 @@ class CustomEnvSettings(EnvSettingsSource):
|
||||
if env_val and (val_striped := env_val.strip()):
|
||||
# there's a value, decode that as JSON
|
||||
try:
|
||||
env_val = settings.__config__.parse_env_var(env_name, val_striped)
|
||||
env_val = json.loads(val_striped)
|
||||
except ValueError:
|
||||
logger.trace(
|
||||
"Error while parsing JSON for "
|
||||
@@ -113,38 +293,80 @@ class CustomEnvSettings(EnvSettingsSource):
|
||||
return d
|
||||
|
||||
|
||||
class BaseConfig(BaseSettings):
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
|
||||
class SettingsConfig(ConfigDict, total=False):
|
||||
env_file: Optional[DOTENV_TYPE]
|
||||
env_file_encoding: str
|
||||
case_sensitive: bool
|
||||
env_nested_delimiter: Optional[str]
|
||||
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
class SettingsConfig(ConfigDict):
|
||||
env_file: Optional[DOTENV_TYPE]
|
||||
env_file_encoding: str
|
||||
case_sensitive: bool
|
||||
env_nested_delimiter: Optional[str]
|
||||
|
||||
|
||||
class BaseSettings(BaseModel):
|
||||
if TYPE_CHECKING:
|
||||
# dummy getattr for pylance checking, actually not used
|
||||
def __getattr__(self, name: str) -> Any: # pragma: no cover
|
||||
return self.__dict__.get(name)
|
||||
|
||||
class Config:
|
||||
extra = Extra.allow
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
model_config: SettingsConfig = SettingsConfig(
|
||||
extra="allow",
|
||||
env_file=".env",
|
||||
env_file_encoding="utf-8",
|
||||
case_sensitive=False,
|
||||
env_nested_delimiter="__",
|
||||
)
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
class Config(SettingsConfig):
|
||||
extra = "allow" # type: ignore
|
||||
env_file = ".env"
|
||||
env_file_encoding = "utf-8"
|
||||
case_sensitive = False
|
||||
env_nested_delimiter = "__"
|
||||
|
||||
@classmethod
|
||||
def customise_sources(
|
||||
cls,
|
||||
init_settings: InitSettingsSource,
|
||||
env_settings: EnvSettingsSource,
|
||||
file_secret_settings: SettingsSourceCallable,
|
||||
) -> Tuple[SettingsSourceCallable, ...]:
|
||||
common_config = init_settings.init_kwargs.pop("_common_config", {})
|
||||
return (
|
||||
init_settings,
|
||||
CustomEnvSettings(
|
||||
env_settings.env_file,
|
||||
env_settings.env_file_encoding,
|
||||
env_settings.env_nested_delimiter,
|
||||
env_settings.env_prefix_len,
|
||||
),
|
||||
InitSettingsSource(common_config),
|
||||
file_secret_settings,
|
||||
def __init__(
|
||||
__settings_self__, # pyright: ignore[reportSelfClsParameterName]
|
||||
_env_file: Optional[DOTENV_TYPE] = ENV_FILE_SENTINEL,
|
||||
_env_file_encoding: Optional[str] = None,
|
||||
_env_nested_delimiter: Optional[str] = None,
|
||||
**values: Any,
|
||||
) -> None:
|
||||
super().__init__(
|
||||
**__settings_self__._settings_build_values(
|
||||
values,
|
||||
env_file=_env_file,
|
||||
env_file_encoding=_env_file_encoding,
|
||||
env_nested_delimiter=_env_nested_delimiter,
|
||||
)
|
||||
)
|
||||
|
||||
def _settings_build_values(
|
||||
self,
|
||||
init_kwargs: Dict[str, Any],
|
||||
env_file: Optional[DOTENV_TYPE] = None,
|
||||
env_file_encoding: Optional[str] = None,
|
||||
env_nested_delimiter: Optional[str] = None,
|
||||
) -> Dict[str, Any]:
|
||||
init_settings = InitSettingsSource(self.__class__, init_kwargs=init_kwargs)
|
||||
env_settings = DotEnvSettingsSource(
|
||||
self.__class__,
|
||||
env_file=env_file,
|
||||
env_file_encoding=env_file_encoding,
|
||||
env_nested_delimiter=env_nested_delimiter,
|
||||
)
|
||||
return deep_update(env_settings(), init_settings())
|
||||
|
||||
class Env(BaseConfig):
|
||||
|
||||
class Env(BaseSettings):
|
||||
"""运行环境配置。大小写不敏感。
|
||||
|
||||
将会从 **环境变量** > **dotenv 配置文件** 的优先级读取环境信息。
|
||||
@@ -156,11 +378,8 @@ class Env(BaseConfig):
|
||||
NoneBot 将从 `.env.{environment}` 文件中加载配置。
|
||||
"""
|
||||
|
||||
class Config:
|
||||
env_file = ".env"
|
||||
|
||||
|
||||
class Config(BaseConfig):
|
||||
class Config(BaseSettings):
|
||||
"""NoneBot 主要配置。大小写不敏感。
|
||||
|
||||
除了 NoneBot 的配置项外,还可以自行添加配置项到 `.env.{environment}` 文件中。
|
||||
@@ -169,7 +388,8 @@ class Config(BaseConfig):
|
||||
配置方法参考: [配置](https://nonebot.dev/docs/appendices/config)
|
||||
"""
|
||||
|
||||
_env_file: DotenvType = ".env", ".env.prod"
|
||||
if TYPE_CHECKING:
|
||||
_env_file: Optional[DOTENV_TYPE] = ".env", ".env.prod"
|
||||
|
||||
# nonebot configs
|
||||
driver: str = "~fastapi"
|
||||
@@ -241,9 +461,8 @@ class Config(BaseConfig):
|
||||
|
||||
用法:
|
||||
```conf
|
||||
SESSION_EXPIRE_TIMEOUT=120 # 单位: 秒
|
||||
SESSION_EXPIRE_TIMEOUT=[DD ][HH:MM]SS[.ffffff]
|
||||
SESSION_EXPIRE_TIMEOUT=P[DD]DT[HH]H[MM]M[SS]S # ISO 8601
|
||||
SESSION_EXPIRE_TIMEOUT=[-][DD]D[,][HH:MM:]SS[.ffffff]
|
||||
SESSION_EXPIRE_TIMEOUT=[±]P[DD]DT[HH]H[MM]M[SS]S # ISO 8601
|
||||
```
|
||||
"""
|
||||
|
||||
@@ -254,11 +473,19 @@ class Config(BaseConfig):
|
||||
# custom configs can be assigned during nonebot.init
|
||||
# or from env file using json loads
|
||||
|
||||
class Config:
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
model_config = SettingsConfig(env_file=(".env", ".env.prod"))
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
class Config(SettingsConfig):
|
||||
env_file = ".env", ".env.prod"
|
||||
|
||||
|
||||
__autodoc__ = {
|
||||
"CustomEnvSettings": False,
|
||||
"BaseConfig": False,
|
||||
"SettingsError": False,
|
||||
"BaseSettingsSource": False,
|
||||
"InitSettingsSource": False,
|
||||
"DotEnvSettingsSource": False,
|
||||
"SettingsConfig": False,
|
||||
"BaseSettings": False,
|
||||
}
|
||||
|
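The rewritten settings sources read values from init kwargs, the process environment, and dotenv files, JSON-decode complex fields, and explode `env_nested_delimiter`-separated names into nested dicts. A hedged sketch of that behaviour, assuming a nested `BaseModel` field is detected as a complex value; the class and variable names here are illustrative only:

```python
import os
from pydantic import BaseModel
from nonebot.config import BaseSettings

class APIInfo(BaseModel):        # illustrative nested model
    url: str = "https://example.com"
    token: str = ""

class MySettings(BaseSettings):  # illustrative settings class
    api: APIInfo = APIInfo()
    debug: bool = False

# with the default env_nested_delimiter="__", these variables...
os.environ["API__URL"] = "https://api.example.com"
os.environ["API__TOKEN"] = "secret"
os.environ["DEBUG"] = "true"

# ...are exploded into a nested dict and validated into the model
settings = MySettings(_env_file=None)
print(settings.api.url, settings.api.token)  # https://api.example.com secret
print(settings.debug)                        # True
```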
@@ -24,14 +24,11 @@ from typing import (
|
||||
cast,
|
||||
)
|
||||
|
||||
from pydantic import BaseConfig
|
||||
from pydantic.schema import get_annotation_from_field_info
|
||||
from pydantic.fields import Required, FieldInfo, Undefined, ModelField
|
||||
|
||||
from nonebot.log import logger
|
||||
from nonebot.typing import _DependentCallable
|
||||
from nonebot.exception import SkippedException
|
||||
from nonebot.utils import run_sync, is_coroutine_callable
|
||||
from nonebot.compat import FieldInfo, ModelField, PydanticUndefined
|
||||
|
||||
from .utils import check_field_type, get_typed_signature
|
||||
|
||||
@@ -45,6 +42,10 @@ class Param(abc.ABC, FieldInfo):
|
||||
继承自 `pydantic.fields.FieldInfo`,用于描述参数信息(不包括参数名)。
|
||||
"""
|
||||
|
||||
def __init__(self, *args, validate: bool = False, **kwargs: Any) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.validate = validate
|
||||
|
||||
@classmethod
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type["Param"], ...]
|
||||
@@ -65,10 +66,6 @@ class Param(abc.ABC, FieldInfo):
|
||||
return
|
||||
|
||||
|
||||
class CustomConfig(BaseConfig):
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class Dependent(Generic[R]):
|
||||
"""依赖注入容器
|
||||
@@ -97,6 +94,7 @@ class Dependent(Generic[R]):
|
||||
)
|
||||
|
||||
async def __call__(self, **kwargs: Any) -> R:
|
||||
try:
|
||||
# do pre-check
|
||||
await self.check(**kwargs)
|
||||
|
||||
@@ -108,21 +106,20 @@ class Dependent(Generic[R]):
|
||||
return await cast(Callable[..., Awaitable[R]], self.call)(**values)
|
||||
else:
|
||||
return await run_sync(cast(Callable[..., R], self.call))(**values)
|
||||
except SkippedException as e:
|
||||
logger.trace(f"{self} skipped due to {e}")
|
||||
raise
|
||||
|
||||
@staticmethod
|
||||
def parse_params(
|
||||
call: _DependentCallable[R], allow_types: Tuple[Type[Param], ...]
|
||||
) -> Tuple[ModelField]:
|
||||
) -> Tuple[ModelField, ...]:
|
||||
fields: List[ModelField] = []
|
||||
params = get_typed_signature(call).parameters.values()
|
||||
|
||||
for param in params:
|
||||
default_value = Required
|
||||
if param.default != param.empty:
|
||||
default_value = param.default
|
||||
|
||||
if isinstance(default_value, Param):
|
||||
field_info = default_value
|
||||
if isinstance(param.default, Param):
|
||||
field_info = param.default
|
||||
else:
|
||||
for allow_type in allow_types:
|
||||
if field_info := allow_type._check_param(param, allow_types):
|
||||
@@ -133,25 +130,13 @@ class Dependent(Generic[R]):
|
||||
f"for function {call} with type {param.annotation}"
|
||||
)
|
||||
|
||||
default_value = field_info.default
|
||||
|
||||
annotation: Any = Any
|
||||
required = default_value == Required
|
||||
if param.annotation != param.empty:
|
||||
if param.annotation is not param.empty:
|
||||
annotation = param.annotation
|
||||
annotation = get_annotation_from_field_info(
|
||||
annotation, field_info, param.name
|
||||
)
|
||||
|
||||
fields.append(
|
||||
ModelField(
|
||||
name=param.name,
|
||||
type_=annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None if required else default_value,
|
||||
required=required,
|
||||
field_info=field_info,
|
||||
ModelField.construct(
|
||||
name=param.name, annotation=annotation, field_info=field_info
|
||||
)
|
||||
)
|
||||
|
||||
@@ -191,25 +176,18 @@ class Dependent(Generic[R]):
|
||||
return cls(call, params, parameterless_params)
|
||||
|
||||
async def check(self, **params: Any) -> None:
|
||||
try:
|
||||
await asyncio.gather(*(param._check(**params) for param in self.parameterless))
|
||||
await asyncio.gather(
|
||||
*(param._check(**params) for param in self.parameterless)
|
||||
*(cast(Param, param.field_info)._check(**params) for param in self.params)
|
||||
)
|
||||
await asyncio.gather(
|
||||
*(
|
||||
cast(Param, param.field_info)._check(**params)
|
||||
for param in self.params
|
||||
)
|
||||
)
|
||||
except SkippedException as e:
|
||||
logger.trace(f"{self} skipped due to {e}")
|
||||
raise
|
||||
|
||||
async def _solve_field(self, field: ModelField, params: Dict[str, Any]) -> Any:
|
||||
value = await cast(Param, field.field_info)._solve(**params)
|
||||
if value is Undefined:
|
||||
param = cast(Param, field.field_info)
|
||||
value = await param._solve(**params)
|
||||
if value is PydanticUndefined:
|
||||
value = field.get_default()
|
||||
return check_field_type(field, value)
|
||||
v = check_field_type(field, value)
|
||||
return v if param.validate else value
|
||||
|
||||
async def solve(self, **params: Any) -> Dict[str, Any]:
|
||||
# solve parameterless
|
||||
|
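With the compat layer in place, `parse_params` no longer hand-builds pydantic-version-specific `ModelField` objects; it wraps each parameter's annotation and `Param` into `ModelField.construct`. A rough sketch of what that produces for an ordinary handler signature; the handler is hypothetical and uses plain `FieldInfo` instead of a real `Param`:

```python
import inspect
from nonebot.compat import FieldInfo, ModelField

async def handler(count: int, note: str = "hi"):  # hypothetical dependency callable
    ...

params = inspect.signature(handler).parameters
fields = [
    ModelField.construct(
        name=name,
        annotation=param.annotation,
        field_info=FieldInfo() if param.default is param.empty else FieldInfo(param.default),
    )
    for name, param in params.items()
]
print([field.name for field in fields])  # ["count", "note"]
```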
@@ -5,15 +5,13 @@ FrontMatter:
|
||||
"""
|
||||
|
||||
import inspect
|
||||
from typing import Any, Dict, TypeVar, Callable, ForwardRef
|
||||
from typing import Any, Dict, Callable, ForwardRef
|
||||
|
||||
from loguru import logger
|
||||
from pydantic.fields import ModelField
|
||||
from pydantic.typing import evaluate_forwardref
|
||||
|
||||
from nonebot.exception import TypeMisMatch
|
||||
|
||||
V = TypeVar("V")
|
||||
from nonebot.typing import evaluate_forwardref
|
||||
from nonebot.compat import DEFAULT_CONFIG, ModelField, model_field_validate
|
||||
|
||||
|
||||
def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature:
|
||||
@@ -49,10 +47,10 @@ def get_typed_annotation(param: inspect.Parameter, globalns: Dict[str, Any]) ->
|
||||
return annotation
|
||||
|
||||
|
||||
def check_field_type(field: ModelField, value: V) -> V:
|
||||
def check_field_type(field: ModelField, value: Any) -> Any:
|
||||
"""检查字段类型是否匹配"""
|
||||
|
||||
_, errs_ = field.validate(value, {}, loc=())
|
||||
if errs_:
|
||||
try:
|
||||
return model_field_validate(field, value, DEFAULT_CONFIG)
|
||||
except ValueError:
|
||||
raise TypeMisMatch(field, value)
|
||||
return value
|
||||
|
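`check_field_type` now defers to the compat `model_field_validate`, so a mismatched value surfaces as `TypeMisMatch` on both pydantic major versions while a matching value is returned unchanged. A minimal sketch:

```python
from nonebot.compat import ModelField
from nonebot.exception import TypeMisMatch
from nonebot.dependencies.utils import check_field_type

field = ModelField.construct(name="count", annotation=int)

assert check_field_type(field, 42) == 42  # valid value is passed through
try:
    check_field_type(field, "not-a-number")
except TypeMisMatch:
    print("value does not match the declared annotation")
```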
@@ -8,30 +8,40 @@ FrontMatter:
|
||||
"""
|
||||
|
||||
from nonebot.internal.driver import URL as URL
|
||||
from nonebot.internal.driver import Mixin as Mixin
|
||||
from nonebot.internal.driver import Driver as Driver
|
||||
from nonebot.internal.driver import Cookies as Cookies
|
||||
from nonebot.internal.driver import Request as Request
|
||||
from nonebot.internal.driver import Response as Response
|
||||
from nonebot.internal.driver import ASGIMixin as ASGIMixin
|
||||
from nonebot.internal.driver import WebSocket as WebSocket
|
||||
from nonebot.internal.driver import HTTPVersion as HTTPVersion
|
||||
from nonebot.internal.driver import ForwardMixin as ForwardMixin
|
||||
from nonebot.internal.driver import ReverseMixin as ReverseMixin
|
||||
from nonebot.internal.driver import ForwardDriver as ForwardDriver
|
||||
from nonebot.internal.driver import ReverseDriver as ReverseDriver
|
||||
from nonebot.internal.driver import combine_driver as combine_driver
|
||||
from nonebot.internal.driver import HTTPClientMixin as HTTPClientMixin
|
||||
from nonebot.internal.driver import HTTPServerSetup as HTTPServerSetup
|
||||
from nonebot.internal.driver import WebSocketClientMixin as WebSocketClientMixin
|
||||
from nonebot.internal.driver import WebSocketServerSetup as WebSocketServerSetup
|
||||
|
||||
__autodoc__ = {
|
||||
"URL": True,
|
||||
"Driver": True,
|
||||
"Cookies": True,
|
||||
"Request": True,
|
||||
"Response": True,
|
||||
"WebSocket": True,
|
||||
"HTTPVersion": True,
|
||||
"Driver": True,
|
||||
"Mixin": True,
|
||||
"ForwardMixin": True,
|
||||
"ForwardDriver": True,
|
||||
"HTTPClientMixin": True,
|
||||
"WebSocketClientMixin": True,
|
||||
"ReverseMixin": True,
|
||||
"ReverseDriver": True,
|
||||
"ASGIMixin": True,
|
||||
"combine_driver": True,
|
||||
"HTTPServerSetup": True,
|
||||
"WebSocketServerSetup": True,
|
||||
|
@@ -16,14 +16,19 @@ FrontMatter:
|
||||
"""
|
||||
|
||||
from typing_extensions import override
|
||||
from typing import Type, AsyncGenerator
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import TYPE_CHECKING, AsyncGenerator
|
||||
|
||||
from nonebot.drivers import Request, Response
|
||||
from nonebot.exception import WebSocketClosed
|
||||
from nonebot.drivers.none import Driver as NoneDriver
|
||||
from nonebot.drivers import WebSocket as BaseWebSocket
|
||||
from nonebot.drivers import HTTPVersion, ForwardMixin, ForwardDriver, combine_driver
|
||||
from nonebot.drivers import (
|
||||
HTTPVersion,
|
||||
HTTPClientMixin,
|
||||
WebSocketClientMixin,
|
||||
combine_driver,
|
||||
)
|
||||
|
||||
try:
|
||||
import aiohttp
|
||||
@@ -34,7 +39,7 @@ except ModuleNotFoundError as e: # pragma: no cover
|
||||
) from e
|
||||
|
||||
|
||||
class Mixin(ForwardMixin):
|
||||
class Mixin(HTTPClientMixin, WebSocketClientMixin):
|
||||
"""AIOHTTP Mixin"""
|
||||
|
||||
@property
|
||||
@@ -172,5 +177,10 @@ class WebSocket(BaseWebSocket):
|
||||
await self.websocket.send_bytes(data)
|
||||
|
||||
|
||||
Driver: Type[ForwardDriver] = combine_driver(NoneDriver, Mixin) # type: ignore
|
||||
"""AIOHTTP Driver"""
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class Driver(Mixin, NoneDriver): ...
|
||||
|
||||
else:
|
||||
Driver = combine_driver(NoneDriver, Mixin)
|
||||
"""AIOHTTP Driver"""
|
||||
|
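The `if TYPE_CHECKING` split lets type checkers see `Driver` as a concrete `Mixin + NoneDriver` subclass while the runtime object is still produced by `combine_driver`. The same mechanism backs the driver string in project config; a sketch of selecting a combined driver, with illustrative values:

```python
import nonebot

# "~" is shorthand for the nonebot.drivers package; the first entry is the
# server driver, the remaining entries are client mixins combined onto it.
nonebot.init(driver="~fastapi+~aiohttp")

driver = nonebot.get_driver()
print(driver.type)  # e.g. "fastapi+aiohttp"
```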
@@ -15,24 +15,24 @@ FrontMatter:
|
||||
description: nonebot.drivers.fastapi 模块
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
import contextlib
|
||||
from functools import wraps
|
||||
from typing_extensions import override
|
||||
from typing import Any, Dict, List, Tuple, Union, Optional
|
||||
|
||||
from pydantic import BaseSettings
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.config import Env
|
||||
from nonebot.drivers import ASGIMixin
|
||||
from nonebot.exception import WebSocketClosed
|
||||
from nonebot.internal.driver import FileTypes
|
||||
from nonebot.drivers import Driver as BaseDriver
|
||||
from nonebot.config import Config as NoneBotConfig
|
||||
from nonebot.drivers import Request as BaseRequest
|
||||
from nonebot.drivers import WebSocket as BaseWebSocket
|
||||
from nonebot.drivers import ReverseDriver, HTTPServerSetup, WebSocketServerSetup
|
||||
|
||||
from ._lifespan import LIFESPAN_FUNC, Lifespan
|
||||
from nonebot.compat import model_dump, type_validate_python
|
||||
from nonebot.drivers import HTTPServerSetup, WebSocketServerSetup
|
||||
|
||||
try:
|
||||
import uvicorn
|
||||
@@ -59,7 +59,7 @@ def catch_closed(func):
|
||||
return decorator
|
||||
|
||||
|
||||
class Config(BaseSettings):
|
||||
class Config(BaseModel):
|
||||
"""FastAPI 驱动框架设置,详情参考 FastAPI 文档"""
|
||||
|
||||
fastapi_openapi_url: Optional[str] = None
|
||||
@@ -83,19 +83,14 @@ class Config(BaseSettings):
|
||||
fastapi_extra: Dict[str, Any] = {}
|
||||
"""传递给 `FastAPI` 的其他参数。"""
|
||||
|
||||
class Config:
|
||||
extra = "ignore"
|
||||
|
||||
|
||||
class Driver(ReverseDriver):
|
||||
class Driver(BaseDriver, ASGIMixin):
|
||||
"""FastAPI 驱动框架。"""
|
||||
|
||||
def __init__(self, env: Env, config: NoneBotConfig):
|
||||
super().__init__(env, config)
|
||||
|
||||
self.fastapi_config: Config = Config(**config.dict())
|
||||
|
||||
self._lifespan = Lifespan()
|
||||
self.fastapi_config: Config = type_validate_python(Config, model_dump(config))
|
||||
|
||||
self._server_app = FastAPI(
|
||||
lifespan=self._lifespan_manager,
|
||||
@@ -153,14 +148,6 @@ class Driver(ReverseDriver):
|
||||
name=setup.name,
|
||||
)
|
||||
|
||||
@override
|
||||
def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
return self._lifespan.on_startup(func)
|
||||
|
||||
@override
|
||||
def on_shutdown(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
return self._lifespan.on_shutdown(func)
|
||||
|
||||
@contextlib.asynccontextmanager
|
||||
async def _lifespan_manager(self, app: FastAPI):
|
||||
await self._lifespan.startup()
|
||||
@@ -179,7 +166,7 @@ class Driver(ReverseDriver):
|
||||
**kwargs,
|
||||
):
|
||||
"""使用 `uvicorn` 启动 FastAPI"""
|
||||
super().run(host, port, app, **kwargs)
|
||||
super().run(host, port, app=app, **kwargs)
|
||||
LOGGING_CONFIG = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
|
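Driver-specific settings now come from a plain `BaseModel` filled via `type_validate_python(Config, model_dump(config))` instead of `Config(**config.dict())`, which keeps the extraction identical under pydantic v1 and v2. Roughly, with stand-in models and example values:

```python
from pydantic import BaseModel
from nonebot.compat import model_dump, type_validate_python

class DriverConfig(BaseModel):   # stand-in for the FastAPI driver Config
    fastapi_reload: bool = False

class GlobalConfig(BaseModel):   # stand-in for nonebot.config.Config
    fastapi_reload: bool = True
    other_setting: str = "ignored"  # unknown keys are dropped by default

global_config = GlobalConfig()
driver_config = type_validate_python(DriverConfig, model_dump(global_config))
print(driver_config.fastapi_reload)  # True
```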
@@ -15,18 +15,15 @@ FrontMatter:
|
||||
description: nonebot.drivers.httpx 模块
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from typing_extensions import override
|
||||
from typing import Type, AsyncGenerator
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from nonebot.drivers.none import Driver as NoneDriver
|
||||
from nonebot.drivers import (
|
||||
Request,
|
||||
Response,
|
||||
WebSocket,
|
||||
HTTPVersion,
|
||||
ForwardMixin,
|
||||
ForwardDriver,
|
||||
HTTPClientMixin,
|
||||
combine_driver,
|
||||
)
|
||||
|
||||
@@ -39,7 +36,7 @@ except ModuleNotFoundError as e: # pragma: no cover
|
||||
) from e
|
||||
|
||||
|
||||
class Mixin(ForwardMixin):
|
||||
class Mixin(HTTPClientMixin):
|
||||
"""HTTPX Mixin"""
|
||||
|
||||
@property
|
||||
@@ -72,12 +69,11 @@ class Mixin(ForwardMixin):
|
||||
request=setup,
|
||||
)
|
||||
|
||||
@override
|
||||
@asynccontextmanager
|
||||
async def websocket(self, setup: Request) -> AsyncGenerator[WebSocket, None]:
|
||||
async with super(Mixin, self).websocket(setup) as ws:
|
||||
yield ws
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
Driver: Type[ForwardDriver] = combine_driver(NoneDriver, Mixin) # type: ignore
|
||||
"""HTTPX Driver"""
|
||||
class Driver(Mixin, NoneDriver): ...
|
||||
|
||||
else:
|
||||
Driver = combine_driver(NoneDriver, Mixin)
|
||||
"""HTTPX Driver"""
|
||||
|
@@ -19,8 +19,6 @@ from nonebot.consts import WINDOWS
|
||||
from nonebot.config import Env, Config
|
||||
from nonebot.drivers import Driver as BaseDriver
|
||||
|
||||
from ._lifespan import LIFESPAN_FUNC, Lifespan
|
||||
|
||||
HANDLED_SIGNALS = (
|
||||
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
|
||||
signal.SIGTERM, # Unix signal 15. Sent by `kill <pid>`.
|
||||
@@ -35,8 +33,6 @@ class Driver(BaseDriver):
|
||||
def __init__(self, env: Env, config: Config):
|
||||
super().__init__(env, config)
|
||||
|
||||
self._lifespan = Lifespan()
|
||||
|
||||
self.should_exit: asyncio.Event = asyncio.Event()
|
||||
self.force_exit: bool = False
|
||||
|
||||
@@ -52,16 +48,6 @@ class Driver(BaseDriver):
|
||||
"""none driver 使用的 logger"""
|
||||
return logger
|
||||
|
||||
@override
|
||||
def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
"""注册一个启动时执行的函数"""
|
||||
return self._lifespan.on_startup(func)
|
||||
|
||||
@override
|
||||
def on_shutdown(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
"""注册一个停止时执行的函数"""
|
||||
return self._lifespan.on_shutdown(func)
|
||||
|
||||
@override
|
||||
def run(self, *args, **kwargs):
|
||||
"""启动 none driver"""
|
||||
|
@@ -18,28 +18,20 @@ FrontMatter:
|
||||
import asyncio
|
||||
from functools import wraps
|
||||
from typing_extensions import override
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Tuple,
|
||||
Union,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Coroutine,
|
||||
cast,
|
||||
)
|
||||
from typing import Any, Dict, List, Tuple, Union, Optional, cast
|
||||
|
||||
from pydantic import BaseSettings
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.config import Env
|
||||
from nonebot.drivers import ASGIMixin
|
||||
from nonebot.exception import WebSocketClosed
|
||||
from nonebot.internal.driver import FileTypes
|
||||
from nonebot.drivers import Driver as BaseDriver
|
||||
from nonebot.config import Config as NoneBotConfig
|
||||
from nonebot.drivers import Request as BaseRequest
|
||||
from nonebot.drivers import WebSocket as BaseWebSocket
|
||||
from nonebot.drivers import ReverseDriver, HTTPServerSetup, WebSocketServerSetup
|
||||
from nonebot.compat import model_dump, type_validate_python
|
||||
from nonebot.drivers import HTTPServerSetup, WebSocketServerSetup
|
||||
|
||||
try:
|
||||
import uvicorn
|
||||
@@ -55,8 +47,6 @@ except ModuleNotFoundError as e: # pragma: no cover
|
||||
"Install with pip: `pip install nonebot2[quart]`"
|
||||
) from e
|
||||
|
||||
_AsyncCallable = TypeVar("_AsyncCallable", bound=Callable[..., Coroutine])
|
||||
|
||||
|
||||
def catch_closed(func):
|
||||
@wraps(func)
|
||||
@@ -69,7 +59,7 @@ def catch_closed(func):
|
||||
return decorator
|
||||
|
||||
|
||||
class Config(BaseSettings):
|
||||
class Config(BaseModel):
|
||||
"""Quart 驱动框架设置"""
|
||||
|
||||
quart_reload: bool = False
|
||||
@@ -85,21 +75,20 @@ class Config(BaseSettings):
|
||||
quart_extra: Dict[str, Any] = {}
|
||||
"""传递给 `Quart` 的其他参数。"""
|
||||
|
||||
class Config:
|
||||
extra = "ignore"
|
||||
|
||||
|
||||
class Driver(ReverseDriver):
|
||||
class Driver(BaseDriver, ASGIMixin):
|
||||
"""Quart 驱动框架"""
|
||||
|
||||
def __init__(self, env: Env, config: NoneBotConfig):
|
||||
super().__init__(env, config)
|
||||
|
||||
self.quart_config = Config(**config.dict())
|
||||
self.quart_config = type_validate_python(Config, model_dump(config))
|
||||
|
||||
self._server_app = Quart(
|
||||
self.__class__.__qualname__, **self.quart_config.quart_extra
|
||||
)
|
||||
self._server_app.before_serving(self._lifespan.startup)
|
||||
self._server_app.after_serving(self._lifespan.shutdown)
|
||||
|
||||
@property
|
||||
@override
|
||||
@@ -148,16 +137,6 @@ class Driver(ReverseDriver):
|
||||
view_func=_handle,
|
||||
)
|
||||
|
||||
@override
|
||||
def on_startup(self, func: _AsyncCallable) -> _AsyncCallable:
|
||||
"""参考文档: [`Startup and Shutdown`](https://pgjones.gitlab.io/quart/how_to_guides/startup_shutdown.html)"""
|
||||
return self.server_app.before_serving(func) # type: ignore
|
||||
|
||||
@override
|
||||
def on_shutdown(self, func: _AsyncCallable) -> _AsyncCallable:
|
||||
"""参考文档: [`Startup and Shutdown`](https://pgjones.gitlab.io/quart/how_to_guides/startup_shutdown.html)"""
|
||||
return self.server_app.after_serving(func) # type: ignore
|
||||
|
||||
@override
|
||||
def run(
|
||||
self,
|
||||
|
@@ -19,14 +19,14 @@ import logging
|
||||
from functools import wraps
|
||||
from contextlib import asynccontextmanager
|
||||
from typing_extensions import ParamSpec, override
|
||||
from typing import Type, Union, TypeVar, Callable, Awaitable, AsyncGenerator
|
||||
from typing import TYPE_CHECKING, Union, TypeVar, Callable, Awaitable, AsyncGenerator
|
||||
|
||||
from nonebot.drivers import Request
|
||||
from nonebot.log import LoguruHandler
|
||||
from nonebot.drivers import Request, Response
|
||||
from nonebot.exception import WebSocketClosed
|
||||
from nonebot.drivers.none import Driver as NoneDriver
|
||||
from nonebot.drivers import WebSocket as BaseWebSocket
|
||||
from nonebot.drivers import ForwardMixin, ForwardDriver, combine_driver
|
||||
from nonebot.drivers import WebSocketClientMixin, combine_driver
|
||||
|
||||
try:
|
||||
from websockets.exceptions import ConnectionClosed
|
||||
@@ -50,15 +50,12 @@ def catch_closed(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]]:
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except ConnectionClosed as e:
|
||||
if e.rcvd_then_sent:
|
||||
raise WebSocketClosed(e.rcvd.code, e.rcvd.reason) # type: ignore
|
||||
else:
|
||||
raise WebSocketClosed(e.sent.code, e.sent.reason) # type: ignore
|
||||
raise WebSocketClosed(e.code, e.reason)
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
class Mixin(ForwardMixin):
|
||||
class Mixin(WebSocketClientMixin):
|
||||
"""Websockets Mixin"""
|
||||
|
||||
@property
|
||||
@@ -66,10 +63,6 @@ class Mixin(ForwardMixin):
|
||||
def type(self) -> str:
|
||||
return "websockets"
|
||||
|
||||
@override
|
||||
async def request(self, setup: Request) -> Response:
|
||||
return await super(Mixin, self).request(setup)
|
||||
|
||||
@override
|
||||
@asynccontextmanager
|
||||
async def websocket(self, setup: Request) -> AsyncGenerator["WebSocket", None]:
|
||||
@@ -133,5 +126,10 @@ class WebSocket(BaseWebSocket):
|
||||
await self.websocket.send(data)
|
||||
|
||||
|
||||
Driver: Type[ForwardDriver] = combine_driver(NoneDriver, Mixin) # type: ignore
|
||||
"""Websockets Driver"""
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class Driver(Mixin, NoneDriver): ...
|
||||
|
||||
else:
|
||||
Driver = combine_driver(NoneDriver, Mixin)
|
||||
"""Websockets Driver"""
|
||||
|
@@ -31,7 +31,7 @@ FrontMatter:
|
||||
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic.fields import ModelField
|
||||
from nonebot.compat import ModelField
|
||||
|
||||
|
||||
class NoneBotException(Exception):
|
||||
|
@@ -3,14 +3,16 @@ from contextlib import asynccontextmanager
|
||||
from typing import Any, Dict, AsyncGenerator
|
||||
|
||||
from nonebot.config import Config
|
||||
from nonebot.internal.driver._lifespan import LIFESPAN_FUNC
|
||||
from nonebot.internal.driver import (
|
||||
Driver,
|
||||
Request,
|
||||
Response,
|
||||
ASGIMixin,
|
||||
WebSocket,
|
||||
ForwardDriver,
|
||||
ReverseDriver,
|
||||
HTTPClientMixin,
|
||||
HTTPServerSetup,
|
||||
WebSocketClientMixin,
|
||||
WebSocketServerSetup,
|
||||
)
|
||||
|
||||
@@ -72,30 +74,33 @@ class Adapter(abc.ABC):
|
||||
|
||||
def setup_http_server(self, setup: HTTPServerSetup):
|
||||
"""设置一个 HTTP 服务器路由配置"""
|
||||
if not isinstance(self.driver, ReverseDriver):
|
||||
if not isinstance(self.driver, ASGIMixin):
|
||||
raise TypeError("Current driver does not support http server")
|
||||
self.driver.setup_http_server(setup)
|
||||
|
||||
def setup_websocket_server(self, setup: WebSocketServerSetup):
|
||||
"""设置一个 WebSocket 服务器路由配置"""
|
||||
if not isinstance(self.driver, ReverseDriver):
|
||||
if not isinstance(self.driver, ASGIMixin):
|
||||
raise TypeError("Current driver does not support websocket server")
|
||||
self.driver.setup_websocket_server(setup)
|
||||
|
||||
async def request(self, setup: Request) -> Response:
|
||||
"""进行一个 HTTP 客户端请求"""
|
||||
if not isinstance(self.driver, ForwardDriver):
|
||||
if not isinstance(self.driver, HTTPClientMixin):
|
||||
raise TypeError("Current driver does not support http client")
|
||||
return await self.driver.request(setup)
|
||||
|
||||
@asynccontextmanager
|
||||
async def websocket(self, setup: Request) -> AsyncGenerator[WebSocket, None]:
|
||||
"""建立一个 WebSocket 客户端连接请求"""
|
||||
if not isinstance(self.driver, ForwardDriver):
|
||||
if not isinstance(self.driver, WebSocketClientMixin):
|
||||
raise TypeError("Current driver does not support websocket client")
|
||||
async with self.driver.websocket(setup) as ws:
|
||||
yield ws
|
||||
|
||||
def on_ready(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
return self.driver._lifespan.on_ready(func)
|
||||
|
||||
@abc.abstractmethod
|
||||
async def _call_api(self, bot: Bot, api: str, **data: Any) -> Any:
|
||||
"""`Adapter` 实际调用 api 的逻辑实现函数,实现该方法以调用 api。
|
||||
|
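Adapters now probe the driver for the specific mixin they need (`ASGIMixin`, `HTTPClientMixin`, `WebSocketClientMixin`) instead of the coarse `ForwardDriver`/`ReverseDriver` classes, and the `Adapter.request`/`setup_*` helpers above perform those checks for you. A minimal sketch of an adapter method relying on that; the adapter and endpoint are hypothetical:

```python
from typing import Any
from nonebot.adapters import Adapter, Bot
from nonebot.drivers import Request

class MyAdapter(Adapter):  # hypothetical adapter, for illustration only
    @classmethod
    def get_name(cls) -> str:
        return "my-adapter"

    async def _call_api(self, bot: Bot, api: str, **data: Any) -> Any:
        # Adapter.request() raises TypeError unless the driver is an HTTPClientMixin
        request = Request("POST", f"https://example.com/api/{api}", json=data)
        response = await self.request(request)
        return response.content
```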
@@ -14,8 +14,7 @@ if TYPE_CHECKING:
|
||||
from .message import Message, MessageSegment
|
||||
|
||||
class _ApiCall(Protocol):
|
||||
async def __call__(self, **kwargs: Any) -> Any:
|
||||
...
|
||||
async def __call__(self, **kwargs: Any) -> Any: ...
|
||||
|
||||
|
||||
class Bot(abc.ABC):
|
||||
@@ -106,7 +105,10 @@ class Bot(abc.ABC):
|
||||
logger.debug("Running CalledAPI hooks...")
|
||||
await asyncio.gather(*coros)
|
||||
except MockApiException as e:
|
||||
# mock api result
|
||||
result = e.result
|
||||
# ignore exception
|
||||
exception = None
|
||||
logger.debug(
|
||||
f"Calling API {api} result is mocked. Return {result} instead."
|
||||
)
|
||||
|
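The mocked-result branch above is driven by hooks raising `MockApiException`. A hedged example of a called-API hook short-circuiting a failed call this way; the hook signature follows `nonebot.typing.T_CalledAPIHook`, and the api name is illustrative:

```python
from typing import Any, Dict, Optional

from nonebot.adapters import Bot
from nonebot.exception import MockApiException

@Bot.on_called_api
async def override_result(
    bot: Bot, exception: Optional[Exception], api: str, data: Dict[str, Any], result: Any
):
    # replace a failed send with a canned result instead of propagating the error
    if exception and api == "send_msg":
        raise MockApiException({"message_id": 0})
```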
@@ -4,6 +4,7 @@ from typing import Any, Type, TypeVar
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.utils import DataclassEncoder
|
||||
from nonebot.compat import PYDANTIC_V2, ConfigDict
|
||||
|
||||
from .message import Message
|
||||
|
||||
@@ -13,10 +14,16 @@ E = TypeVar("E", bound="Event")
|
||||
class Event(abc.ABC, BaseModel):
|
||||
"""Event 基类。提供获取关键信息的方法,其余信息可直接获取。"""
|
||||
|
||||
class Config:
|
||||
extra = "allow"
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
model_config = ConfigDict(extra="allow")
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
class Config(ConfigDict):
|
||||
extra = "allow" # type: ignore
|
||||
json_encoders = {Message: DataclassEncoder}
|
||||
|
||||
if not PYDANTIC_V2: # pragma: pydantic-v1
|
||||
|
||||
@classmethod
|
||||
def validate(cls: Type["E"], value: Any) -> "E":
|
||||
if isinstance(value, Event) and not isinstance(value, cls):
|
||||
|
@@ -17,7 +17,7 @@ from typing import (
|
||||
overload,
|
||||
)
|
||||
|
||||
from pydantic import parse_obj_as
|
||||
from nonebot.compat import custom_validation, type_validate_python
|
||||
|
||||
from .template import MessageTemplate
|
||||
|
||||
@@ -25,6 +25,7 @@ TMS = TypeVar("TMS", bound="MessageSegment")
|
||||
TM = TypeVar("TM", bound="Message")
|
||||
|
||||
|
||||
@custom_validation
|
||||
@dataclass
|
||||
class MessageSegment(abc.ABC, Generic[TM]):
|
||||
"""消息段基类"""
|
||||
@@ -65,6 +66,8 @@ class MessageSegment(abc.ABC, Generic[TM]):
|
||||
def _validate(cls, value) -> Self:
|
||||
if isinstance(value, cls):
|
||||
return value
|
||||
if isinstance(value, MessageSegment):
|
||||
raise ValueError(f"Type {type(value)} can not be converted to {cls}")
|
||||
if not isinstance(value, dict):
|
||||
raise ValueError(f"Expected dict for MessageSegment, got {type(value)}")
|
||||
if "type" not in value:
|
||||
@@ -97,6 +100,7 @@ class MessageSegment(abc.ABC, Generic[TM]):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
@custom_validation
|
||||
class Message(List[TMS], abc.ABC):
|
||||
"""消息序列
|
||||
|
||||
@@ -158,9 +162,9 @@ class Message(List[TMS], abc.ABC):
|
||||
elif isinstance(value, str):
|
||||
pass
|
||||
elif isinstance(value, dict):
|
||||
value = parse_obj_as(cls.get_segment_class(), value)
|
||||
value = type_validate_python(cls.get_segment_class(), value)
|
||||
elif isinstance(value, Iterable):
|
||||
value = [parse_obj_as(cls.get_segment_class(), v) for v in value]
|
||||
value = [type_validate_python(cls.get_segment_class(), v) for v in value]
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Expected str, dict or iterable for Message, got {type(value)}"
|
||||
@@ -281,7 +285,7 @@ class Message(List[TMS], abc.ABC):
|
||||
消息内是否存在给定消息段或给定类型的消息段
|
||||
"""
|
||||
if isinstance(value, str):
|
||||
return bool(next((seg for seg in self if seg.type == value), None))
|
||||
return next((seg for seg in self if seg.type == value), None) is not None
|
||||
return super().__contains__(value)
|
||||
|
||||
def has(self, value: Union[TMS, str]) -> bool:
|
||||
|
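Because `Message` and `MessageSegment` are decorated with `custom_validation`, their pydantic-v1 style `__get_validators__` chain keeps working when values are validated through the compat helpers. The same pattern works for any class; a toy sketch:

```python
from nonebot.compat import custom_validation, type_validate_python

@custom_validation
class Point:  # toy class with pydantic-v1 style validators
    def __init__(self, x: int, y: int):
        self.x, self.y = x, y

    @classmethod
    def __get_validators__(cls):
        yield cls._validate

    @classmethod
    def _validate(cls, value):
        if isinstance(value, cls):
            return value
        return cls(**value)

point = type_validate_python(Point, {"x": 1, "y": 2})
print(point.x, point.y)  # 1 2
```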
@@ -20,9 +20,16 @@ from typing import (
|
||||
overload,
|
||||
)
|
||||
|
||||
from _string import formatter_field_name_split # type: ignore
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .message import Message, MessageSegment
|
||||
|
||||
def formatter_field_name_split( # noqa: F811
|
||||
field_name: str,
|
||||
) -> Tuple[str, List[Tuple[bool, str]]]: ...
|
||||
|
||||
|
||||
TM = TypeVar("TM", bound="Message")
|
||||
TF = TypeVar("TF", str, "Message")
|
||||
|
||||
@@ -36,26 +43,35 @@ class MessageTemplate(Formatter, Generic[TF]):
|
||||
参数:
|
||||
template: 模板
|
||||
factory: 消息类型工厂,默认为 `str`
|
||||
private_getattr: 是否允许在模板中访问私有属性,默认为 `False`
|
||||
"""
|
||||
|
||||
@overload
|
||||
def __init__(
|
||||
self: "MessageTemplate[str]", template: str, factory: Type[str] = str
|
||||
) -> None:
|
||||
...
|
||||
self: "MessageTemplate[str]",
|
||||
template: str,
|
||||
factory: Type[str] = str,
|
||||
private_getattr: bool = False,
|
||||
) -> None: ...
|
||||
|
||||
@overload
|
||||
def __init__(
|
||||
self: "MessageTemplate[TM]", template: Union[str, TM], factory: Type[TM]
|
||||
) -> None:
|
||||
...
|
||||
self: "MessageTemplate[TM]",
|
||||
template: Union[str, TM],
|
||||
factory: Type[TM],
|
||||
private_getattr: bool = False,
|
||||
) -> None: ...
|
||||
|
||||
def __init__(
|
||||
self, template: Union[str, TM], factory: Union[Type[str], Type[TM]] = str
|
||||
self,
|
||||
template: Union[str, TM],
|
||||
factory: Union[Type[str], Type[TM]] = str,
|
||||
private_getattr: bool = False,
|
||||
) -> None:
|
||||
self.template: TF = template # type: ignore
|
||||
self.factory: Type[TF] = factory # type: ignore
|
||||
self.format_specs: Dict[str, FormatSpecFunc] = {}
|
||||
self.private_getattr = private_getattr
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"MessageTemplate({self.template!r}, factory={self.factory!r})"
|
||||
@@ -167,6 +183,19 @@ class MessageTemplate(Formatter, Generic[TF]):

        return functools.reduce(self._add, results), auto_arg_index

    def get_field(
        self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]
    ) -> Tuple[Any, Union[int, str]]:
        first, rest = formatter_field_name_split(field_name)
        obj = self.get_value(first, args, kwargs)

        for is_attr, value in rest:
            if not self.private_getattr and value.startswith("_"):
                raise ValueError("Cannot access private attribute")
            obj = getattr(obj, value) if is_attr else obj[value]

        return obj, first

    def format_field(self, value: Any, format_spec: str) -> Any:
        formatter: Optional[FormatSpecFunc] = self.format_specs.get(format_spec)
        if formatter is None and not issubclass(self.factory, str):
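The new `private_getattr` flag makes attribute access on underscore-prefixed names opt-in, which closes off accidental access to private state from user-controlled format strings. A short sketch; the `User` object is hypothetical:

```python
from dataclasses import dataclass
from nonebot.adapters import MessageTemplate

@dataclass
class User:               # hypothetical object exposed to a template
    name: str = "alice"
    _token: str = "secret"

template = MessageTemplate("hello {user.name}")
print(template.format(user=User()))  # "hello alice"

# with the default private_getattr=False this raises ValueError
MessageTemplate("{user._token}").format(user=User())
```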
@@ -1,8 +1,9 @@
|
||||
from .model import URL as URL
|
||||
from .model import RawURL as RawURL
|
||||
from .driver import Driver as Driver
|
||||
from .abstract import Mixin as Mixin
|
||||
from .model import Cookies as Cookies
|
||||
from .model import Request as Request
|
||||
from .abstract import Driver as Driver
|
||||
from .model import FileType as FileType
|
||||
from .model import Response as Response
|
||||
from .model import DataTypes as DataTypes
|
||||
@@ -10,16 +11,20 @@ from .model import FileTypes as FileTypes
|
||||
from .model import WebSocket as WebSocket
|
||||
from .model import FilesTypes as FilesTypes
|
||||
from .model import QueryTypes as QueryTypes
|
||||
from .abstract import ASGIMixin as ASGIMixin
|
||||
from .model import CookieTypes as CookieTypes
|
||||
from .model import FileContent as FileContent
|
||||
from .model import HTTPVersion as HTTPVersion
|
||||
from .model import HeaderTypes as HeaderTypes
|
||||
from .model import SimpleQuery as SimpleQuery
|
||||
from .model import ContentTypes as ContentTypes
|
||||
from .driver import ForwardMixin as ForwardMixin
|
||||
from .model import QueryVariable as QueryVariable
|
||||
from .driver import ForwardDriver as ForwardDriver
|
||||
from .driver import ReverseDriver as ReverseDriver
|
||||
from .driver import combine_driver as combine_driver
|
||||
from .abstract import ForwardMixin as ForwardMixin
|
||||
from .abstract import ReverseMixin as ReverseMixin
|
||||
from .abstract import ForwardDriver as ForwardDriver
|
||||
from .abstract import ReverseDriver as ReverseDriver
|
||||
from .combine import combine_driver as combine_driver
|
||||
from .model import HTTPServerSetup as HTTPServerSetup
|
||||
from .abstract import HTTPClientMixin as HTTPClientMixin
|
||||
from .model import WebSocketServerSetup as WebSocketServerSetup
|
||||
from .abstract import WebSocketClientMixin as WebSocketClientMixin
|
||||
|
@@ -11,6 +11,7 @@ LIFESPAN_FUNC: TypeAlias = Union[SYNC_LIFESPAN_FUNC, ASYNC_LIFESPAN_FUNC]
class Lifespan:
    def __init__(self) -> None:
        self._startup_funcs: List[LIFESPAN_FUNC] = []
        self._ready_funcs: List[LIFESPAN_FUNC] = []
        self._shutdown_funcs: List[LIFESPAN_FUNC] = []

    def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
@@ -21,6 +22,10 @@ class Lifespan:
        self._shutdown_funcs.append(func)
        return func

    def on_ready(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
        self._ready_funcs.append(func)
        return func

    @staticmethod
    async def _run_lifespan_func(
        funcs: List[LIFESPAN_FUNC],
@@ -35,6 +40,9 @@ class Lifespan:
        if self._startup_funcs:
            await self._run_lifespan_func(self._startup_funcs)

        if self._ready_funcs:
            await self._run_lifespan_func(self._ready_funcs)

    async def shutdown(self) -> None:
        if self._shutdown_funcs:
            await self._run_lifespan_func(self._shutdown_funcs)
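`on_ready` adds a third slot that runs only after every startup hook has finished, which is what `Adapter.on_ready` (added earlier in the adapter base class) registers into. A minimal sketch using the internal `Lifespan` class directly, just to show the ordering:

```python
import asyncio
from nonebot.internal.driver._lifespan import Lifespan

lifespan = Lifespan()

@lifespan.on_startup
async def open_connections():
    print("startup")

@lifespan.on_ready
async def announce_ready():
    print("ready (runs after all startup hooks)")

# prints "startup" first, then "ready (runs after all startup hooks)"
asyncio.run(lifespan.startup())
```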
@@ -1,7 +1,8 @@
import abc
import asyncio
from typing_extensions import TypeAlias
from contextlib import AsyncExitStack, asynccontextmanager
from typing import TYPE_CHECKING, Any, Set, Dict, Type, Callable, AsyncGenerator
from typing import TYPE_CHECKING, Any, Set, Dict, Type, AsyncGenerator

from nonebot.log import logger
from nonebot.config import Env, Config

@@ -15,6 +16,7 @@ from nonebot.typing import (
    T_BotDisconnectionHook,
)

from ._lifespan import LIFESPAN_FUNC, Lifespan
from .model import Request, Response, WebSocket, HTTPServerSetup, WebSocketServerSetup

if TYPE_CHECKING:

@@ -25,7 +27,9 @@ BOT_HOOK_PARAMS = [DependParam, BotParam, DefaultParam]


class Driver(abc.ABC):
    """Driver 基类。
    """驱动器基类。

    驱动器控制框架的启动和停止,适配器的注册,以及机器人生命周期管理。

    参数:
        env: 包含环境信息的 Env 对象

@@ -45,6 +49,8 @@ class Driver(abc.ABC):
        self.config: Config = config
        """全局配置对象"""
        self._bots: Dict[str, "Bot"] = {}
        self._bot_tasks: Set[asyncio.Task] = set()
        self._lifespan = Lifespan()

    def __repr__(self) -> str:
        return (

@@ -94,15 +100,15 @@ class Driver(abc.ABC):
            f"<g>Loaded adapters: {escape_tag(', '.join(self._adapters))}</g>"
        )

    @abc.abstractmethod
    def on_startup(self, func: Callable) -> Callable:
        """注册一个在驱动器启动时执行的函数"""
        raise NotImplementedError
        self.on_shutdown(self._cleanup)

    @abc.abstractmethod
    def on_shutdown(self, func: Callable) -> Callable:
        """注册一个在驱动器停止时执行的函数"""
        raise NotImplementedError
    def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
        """注册一个启动时执行的函数"""
        return self._lifespan.on_startup(func)

    def on_shutdown(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
        """注册一个停止时执行的函数"""
        return self._lifespan.on_shutdown(func)

    @classmethod
    def on_bot_connect(cls, func: T_BotConnectionHook) -> T_BotConnectionHook:

@@ -156,7 +162,9 @@ class Driver(abc.ABC):
                    "</bg #f8bbd0></r>"
                )

        asyncio.create_task(_run_hook(bot))
        task = asyncio.create_task(_run_hook(bot))
        task.add_done_callback(self._bot_tasks.discard)
        self._bot_tasks.add(task)

    def _bot_disconnect(self, bot: "Bot") -> None:
        """在连接断开后,调用该函数来注销 bot 对象"""

@@ -183,23 +191,49 @@ class Driver(abc.ABC):
                    "</bg #f8bbd0></r>"
                )

        asyncio.create_task(_run_hook(bot))
        task = asyncio.create_task(_run_hook(bot))
        task.add_done_callback(self._bot_tasks.discard)
        self._bot_tasks.add(task)

    async def _cleanup(self) -> None:
        """清理驱动器资源"""
        if self._bot_tasks:
            logger.opt(colors=True).debug(
                "<y>Waiting for running bot connection hooks...</y>"
            )
            await asyncio.gather(*self._bot_tasks, return_exceptions=True)


class ForwardMixin(abc.ABC):
    """客户端混入基类。"""
class Mixin(abc.ABC):
    """可与其他驱动器共用的混入基类。"""

    @property
    @abc.abstractmethod
    def type(self) -> str:
        """客户端驱动类型名称"""
        """混入驱动类型名称"""
        raise NotImplementedError


class ForwardMixin(Mixin):
    """客户端混入基类。"""


class ReverseMixin(Mixin):
    """服务端混入基类。"""


class HTTPClientMixin(ForwardMixin):
    """HTTP 客户端混入基类。"""

    @abc.abstractmethod
    async def request(self, setup: Request) -> Response:
        """发送一个 HTTP 请求"""
        raise NotImplementedError


class WebSocketClientMixin(ForwardMixin):
    """WebSocket 客户端混入基类。"""

    @abc.abstractmethod
    @asynccontextmanager
    async def websocket(self, setup: Request) -> AsyncGenerator[WebSocket, None]:

@@ -208,12 +242,11 @@ class ForwardMixin(abc.ABC):
        yield  # used for static type checking's generator detection


class ForwardDriver(Driver, ForwardMixin):
    """客户端基类。将客户端框架封装,以满足适配器使用。"""
class ASGIMixin(ReverseMixin):
    """ASGI 服务端基类。


class ReverseDriver(Driver):
    """服务端基类。将后端框架封装,以满足适配器使用。"""
    将后端框架封装,以满足适配器使用。
    """

    @property
    @abc.abstractmethod

@@ -238,24 +271,14 @@ class ReverseDriver(Driver):
        raise NotImplementedError


def combine_driver(driver: Type[Driver], *mixins: Type[ForwardMixin]) -> Type[Driver]:
    """将一个驱动器和多个混入类合并。"""
    # check first
    assert issubclass(driver, Driver), "`driver` must be subclass of Driver"
    assert all(
        issubclass(m, ForwardMixin) for m in mixins
    ), "`mixins` must be subclass of ForwardMixin"
ForwardDriver: TypeAlias = ForwardMixin
"""支持客户端请求的驱动器。

    if not mixins:
        return driver
**Deprecated**,请使用 {ref}`nonebot.drivers.ForwardMixin` 或其子类代替。
"""

    def type_(self: ForwardDriver) -> str:
        return (
            driver.type.__get__(self)
            + "+"
            + "+".join(x.type.__get__(self) for x in mixins)
        )
ReverseDriver: TypeAlias = ReverseMixin
"""支持服务端请求的驱动器。

    return type(
        "CombinedDriver", (*mixins, driver, ForwardDriver), {"type": property(type_)}
    )  # type: ignore
**Deprecated**,请使用 {ref}`nonebot.drivers.ReverseMixin` 或其子类代替。
"""
nonebot/internal/driver/combine.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from typing import TYPE_CHECKING, Type, Union, TypeVar, overload

from .abstract import Mixin, Driver

D = TypeVar("D", bound="Driver")

if TYPE_CHECKING:

    class CombinedDriver(Driver, Mixin): ...


@overload
def combine_driver(driver: Type[D]) -> Type[D]: ...


@overload
def combine_driver(driver: Type[D], *mixins: Type[Mixin]) -> Type["CombinedDriver"]: ...


def combine_driver(
    driver: Type[D], *mixins: Type[Mixin]
) -> Union[Type[D], Type["CombinedDriver"]]:
    """将一个驱动器和多个混入类合并。"""
    # check first
    if not issubclass(driver, Driver):
        raise TypeError("`driver` must be subclass of Driver")
    if not all(issubclass(m, Mixin) for m in mixins):
        raise TypeError("`mixins` must be subclass of Mixin")

    if not mixins:
        return driver

    def type_(self: "CombinedDriver") -> str:
        return (
            driver.type.__get__(self)  # type: ignore
            + "+"
            + "+".join(x.type.__get__(self) for x in mixins)  # type: ignore
        )

    return type(
        "CombinedDriver", (*mixins, driver), {"type": property(type_)}
    )  # type: ignore
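A short sketch of how `combine_driver` is meant to be used. The driver and mixin classes below are hypothetical stand-ins defined only for the example; the built-in nonebot drivers follow the same pattern but are not part of this diff.

```python
# Illustrative sketch only; the classes defined here are minimal placeholders.
from nonebot.internal.driver.abstract import Driver, HTTPClientMixin
from nonebot.internal.driver.combine import combine_driver


class MyServerDriver(Driver):  # hypothetical concrete driver
    @property
    def type(self) -> str:
        return "my-server"


class MyHTTPClient(HTTPClientMixin):  # hypothetical HTTP client mixin
    @property
    def type(self) -> str:
        return "my-http"

    async def request(self, setup):
        raise NotImplementedError  # placeholder implementation


# With no mixins the driver class is returned unchanged; with mixins a new
# "CombinedDriver" class is created whose instances report type "my-server+my-http".
Combined = combine_driver(MyServerDriver, MyHTTPClient)
```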
@@ -125,7 +125,7 @@ class Request:
        files_ = files.items() if isinstance(files, dict) else files
        for name, file_info in files_:
            if not isinstance(file_info, tuple):
                self.files.append((name, (None, file_info, None)))
                self.files.append((name, (name, file_info, None)))
            elif len(file_info) == 2:
                self.files.append((name, (file_info[0], file_info[1], None)))
            else:
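For reference, a sketch of what the changed branch now records when a bare file value is supplied. The URL, field name, and content are placeholders, and the public `nonebot.drivers.Request` re-export and positional `(method, url)` constructor arguments are assumptions.

```python
# Illustrative: a bare value now gets the field name recorded as its filename
# (previously the filename slot was None).
from nonebot.drivers import Request

req = Request(
    "POST",
    "https://example.com/upload",          # placeholder URL
    files={"avatar": b"\x89PNG\r\n..."},   # bare content, no explicit filename
)
print(req.files)  # [("avatar", ("avatar", b"\x89PNG\r\n...", None))]
```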
@@ -6,6 +6,7 @@ matchers = MatcherManager()

from .matcher import Matcher as Matcher
from .matcher import current_bot as current_bot
from .matcher import MatcherSource as MatcherSource
from .matcher import current_event as current_event
from .matcher import current_handler as current_handler
from .matcher import current_matcher as current_matcher
@@ -1,6 +1,5 @@
from typing import (
    TYPE_CHECKING,
    Any,
    List,
    Type,
    Tuple,

@@ -53,7 +52,7 @@ class MatcherManager(MutableMapping[int, List[Type["Matcher"]]]):
    def __delitem__(self, key: int) -> None:
        del self.provider[key]

    def __eq__(self, other: Any) -> bool:
    def __eq__(self, other: object) -> bool:
        return isinstance(other, MatcherManager) and self.provider == other.provider

    def keys(self) -> KeysView[int]:

@@ -66,12 +65,10 @@ class MatcherManager(MutableMapping[int, List[Type["Matcher"]]]):
        return self.provider.items()

    @overload
    def get(self, key: int) -> Optional[List[Type["Matcher"]]]:
        ...
    def get(self, key: int) -> Optional[List[Type["Matcher"]]]: ...

    @overload
    def get(self, key: int, default: T) -> Union[List[Type["Matcher"]], T]:
        ...
    def get(self, key: int, default: T) -> Union[List[Type["Matcher"]], T]: ...

    def get(
        self, key: int, default: Optional[T] = None
@@ -1,4 +1,9 @@
|
||||
import sys
|
||||
import inspect
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
from types import ModuleType
|
||||
from dataclasses import dataclass
|
||||
from contextvars import ContextVar
|
||||
from typing_extensions import Self
|
||||
from datetime import datetime, timedelta
|
||||
@@ -8,6 +13,7 @@ from typing import (
|
||||
Any,
|
||||
List,
|
||||
Type,
|
||||
Tuple,
|
||||
Union,
|
||||
TypeVar,
|
||||
Callable,
|
||||
@@ -20,7 +26,8 @@ from typing import (
|
||||
|
||||
from nonebot.log import logger
|
||||
from nonebot.internal.rule import Rule
|
||||
from nonebot.dependencies import Dependent
|
||||
from nonebot.utils import classproperty
|
||||
from nonebot.dependencies import Param, Dependent
|
||||
from nonebot.internal.permission import User, Permission
|
||||
from nonebot.internal.adapter import (
|
||||
Bot,
|
||||
@@ -74,15 +81,51 @@ current_matcher: ContextVar["Matcher"] = ContextVar("current_matcher")
current_handler: ContextVar[Dependent] = ContextVar("current_handler")


@dataclass
class MatcherSource:
    """Matcher 源代码上下文信息"""

    plugin_name: Optional[str] = None
    """事件响应器所在插件名称"""
    module_name: Optional[str] = None
    """事件响应器所在插件模块的路径名"""
    lineno: Optional[int] = None
    """事件响应器所在行号"""

    @property
    def plugin(self) -> Optional["Plugin"]:
        """事件响应器所在插件"""
        from nonebot.plugin import get_plugin

        if self.plugin_name is not None:
            return get_plugin(self.plugin_name)

    @property
    def module(self) -> Optional[ModuleType]:
        if self.module_name is not None:
            return sys.modules.get(self.module_name)

    @property
    def file(self) -> Optional[Path]:
        if self.module is not None and (file := inspect.getsourcefile(self.module)):
            return Path(file).absolute()

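A small sketch of what the new `MatcherSource` container provides. The plugin and module names are placeholder assumptions; the `nonebot.matcher.MatcherSource` re-export used here appears later in this diff.

```python
# Illustrative: MatcherSource bundles the plugin/module/line context that the
# Matcher class previously kept as separate class attributes.
from nonebot.matcher import MatcherSource

source = MatcherSource(
    plugin_name="example_plugin",        # hypothetical plugin name
    module_name="example_plugin.echo",   # hypothetical module path
    lineno=42,
)

# Derived views are resolved lazily from the stored names:
print(source.module)  # sys.modules lookup; None if the module is not imported
print(source.file)    # absolute path to the source file, when resolvable
print(source.plugin)  # looked up via nonebot.plugin.get_plugin
```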
class MatcherMeta(type):
|
||||
if TYPE_CHECKING:
|
||||
module_name: Optional[str]
|
||||
type: str
|
||||
_source: Optional[MatcherSource]
|
||||
module_name: Optional[str]
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
f"{self.__name__}(type={self.type!r}"
|
||||
+ (f", module={self.module_name}" if self.module_name else "")
|
||||
+ (
|
||||
f", lineno={self._source.lineno}"
|
||||
if self._source and self._source.lineno is not None
|
||||
else ""
|
||||
)
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@@ -90,14 +133,7 @@ class MatcherMeta(type):
|
||||
class Matcher(metaclass=MatcherMeta):
|
||||
"""事件响应器类"""
|
||||
|
||||
plugin: ClassVar[Optional["Plugin"]] = None
|
||||
"""事件响应器所在插件"""
|
||||
module: ClassVar[Optional[ModuleType]] = None
|
||||
"""事件响应器所在插件模块"""
|
||||
plugin_name: ClassVar[Optional[str]] = None
|
||||
"""事件响应器所在插件名"""
|
||||
module_name: ClassVar[Optional[str]] = None
|
||||
"""事件响应器所在点分割插件模块路径"""
|
||||
_source: ClassVar[Optional[MatcherSource]] = None
|
||||
|
||||
type: ClassVar[str] = ""
|
||||
"""事件响应器类型"""
|
||||
@@ -124,7 +160,7 @@ class Matcher(metaclass=MatcherMeta):
|
||||
_default_permission_updater: ClassVar[Optional[Dependent[Permission]]] = None
|
||||
"""事件响应器权限更新函数"""
|
||||
|
||||
HANDLER_PARAM_TYPES = (
|
||||
HANDLER_PARAM_TYPES: ClassVar[Tuple[Type[Param], ...]] = (
|
||||
DependParam,
|
||||
BotParam,
|
||||
EventParam,
|
||||
@@ -142,6 +178,11 @@ class Matcher(metaclass=MatcherMeta):
|
||||
return (
|
||||
f"{self.__class__.__name__}(type={self.type!r}"
|
||||
+ (f", module={self.module_name}" if self.module_name else "")
|
||||
+ (
|
||||
f", lineno={self._source.lineno}"
|
||||
if self._source and self._source.lineno is not None
|
||||
else ""
|
||||
)
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@@ -158,6 +199,7 @@ class Matcher(metaclass=MatcherMeta):
|
||||
*,
|
||||
plugin: Optional["Plugin"] = None,
|
||||
module: Optional[ModuleType] = None,
|
||||
source: Optional[MatcherSource] = None,
|
||||
expire_time: Optional[Union[datetime, timedelta]] = None,
|
||||
default_state: Optional[T_State] = None,
|
||||
default_type_updater: Optional[Union[T_TypeUpdater, Dependent[str]]] = None,
|
||||
@@ -176,35 +218,64 @@ class Matcher(metaclass=MatcherMeta):
|
||||
temp: 是否为临时事件响应器,即触发一次后删除
|
||||
priority: 响应优先级
|
||||
block: 是否阻止事件向更低优先级的响应器传播
|
||||
plugin: 事件响应器所在插件
|
||||
module: 事件响应器所在模块
|
||||
default_state: 默认状态 `state`
|
||||
plugin: **Deprecated.** 事件响应器所在插件
|
||||
module: **Deprecated.** 事件响应器所在模块
|
||||
source: 事件响应器源代码上下文信息
|
||||
expire_time: 事件响应器最终有效时间点,过时即被删除
|
||||
default_state: 默认状态 `state`
|
||||
default_type_updater: 默认事件类型更新函数
|
||||
default_permission_updater: 默认会话权限更新函数
|
||||
|
||||
返回:
|
||||
Type[Matcher]: 新的事件响应器类
|
||||
"""
|
||||
if plugin is not None:
|
||||
warnings.warn(
|
||||
(
|
||||
"Pass `plugin` context info to create Matcher is deprecated. "
|
||||
"Use `source` instead."
|
||||
),
|
||||
DeprecationWarning,
|
||||
)
|
||||
if module is not None:
|
||||
warnings.warn(
|
||||
(
|
||||
"Pass `module` context info to create Matcher is deprecated. "
|
||||
"Use `source` instead."
|
||||
),
|
||||
DeprecationWarning,
|
||||
)
|
||||
source = source or (
|
||||
MatcherSource(
|
||||
plugin_name=plugin and plugin.name,
|
||||
module_name=module and module.__name__,
|
||||
)
|
||||
if plugin is not None or module is not None
|
||||
else None
|
||||
)
|
||||
|
||||
NewMatcher = type(
|
||||
cls.__name__,
|
||||
(cls,),
|
||||
{
|
||||
"plugin": plugin,
|
||||
"module": module,
|
||||
"plugin_name": plugin and plugin.name,
|
||||
"module_name": module and module.__name__,
|
||||
"_source": source,
|
||||
"type": type_,
|
||||
"rule": rule or Rule(),
|
||||
"permission": permission or Permission(),
|
||||
"handlers": [
|
||||
"handlers": (
|
||||
[
|
||||
(
|
||||
handler
|
||||
if isinstance(handler, Dependent)
|
||||
else Dependent[Any].parse(
|
||||
call=handler, allow_types=cls.HANDLER_PARAM_TYPES
|
||||
)
|
||||
)
|
||||
for handler in handlers
|
||||
]
|
||||
if handlers
|
||||
else [],
|
||||
else []
|
||||
),
|
||||
"temp": temp,
|
||||
"expire_time": (
|
||||
expire_time
|
||||
@@ -246,13 +317,33 @@ class Matcher(metaclass=MatcherMeta):
|
||||
|
||||
matchers[priority].append(NewMatcher)
|
||||
|
||||
return NewMatcher
|
||||
return NewMatcher # type: ignore
|
||||
|
||||
@classmethod
|
||||
def destroy(cls) -> None:
|
||||
"""销毁当前的事件响应器"""
|
||||
matchers[cls.priority].remove(cls)
|
||||
|
||||
@classproperty
|
||||
def plugin(cls) -> Optional["Plugin"]:
|
||||
"""事件响应器所在插件"""
|
||||
return cls._source and cls._source.plugin
|
||||
|
||||
@classproperty
|
||||
def module(cls) -> Optional[ModuleType]:
|
||||
"""事件响应器所在插件模块"""
|
||||
return cls._source and cls._source.module
|
||||
|
||||
@classproperty
|
||||
def plugin_name(cls) -> Optional[str]:
|
||||
"""事件响应器所在插件名"""
|
||||
return cls._source and cls._source.plugin_name
|
||||
|
||||
@classproperty
|
||||
def module_name(cls) -> Optional[str]:
|
||||
"""事件响应器所在插件模块路径"""
|
||||
return cls._source and cls._source.module_name
|
||||
|
||||
@classmethod
|
||||
async def check_perm(
|
||||
cls,
|
||||
@@ -571,12 +662,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
raise SkippedException
|
||||
|
||||
@overload
|
||||
def get_receive(self, id: str) -> Union[Event, None]:
|
||||
...
|
||||
def get_receive(self, id: str) -> Union[Event, None]: ...
|
||||
|
||||
@overload
|
||||
def get_receive(self, id: str, default: T) -> Union[Event, T]:
|
||||
...
|
||||
def get_receive(self, id: str, default: T) -> Union[Event, T]: ...
|
||||
|
||||
def get_receive(
|
||||
self, id: str, default: Optional[T] = None
|
||||
@@ -593,12 +682,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
self.state[LAST_RECEIVE_KEY] = event
|
||||
|
||||
@overload
|
||||
def get_last_receive(self) -> Union[Event, None]:
|
||||
...
|
||||
def get_last_receive(self) -> Union[Event, None]: ...
|
||||
|
||||
@overload
|
||||
def get_last_receive(self, default: T) -> Union[Event, T]:
|
||||
...
|
||||
def get_last_receive(self, default: T) -> Union[Event, T]: ...
|
||||
|
||||
def get_last_receive(
|
||||
self, default: Optional[T] = None
|
||||
@@ -610,12 +697,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
return self.state.get(LAST_RECEIVE_KEY, default)
|
||||
|
||||
@overload
|
||||
def get_arg(self, key: str) -> Union[Message, None]:
|
||||
...
|
||||
def get_arg(self, key: str) -> Union[Message, None]: ...
|
||||
|
||||
@overload
|
||||
def get_arg(self, key: str, default: T) -> Union[Message, T]:
|
||||
...
|
||||
def get_arg(self, key: str, default: T) -> Union[Message, T]: ...
|
||||
|
||||
def get_arg(
|
||||
self, key: str, default: Optional[T] = None
|
||||
@@ -637,12 +722,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
self.state[REJECT_TARGET] = target
|
||||
|
||||
@overload
|
||||
def get_target(self) -> Union[str, None]:
|
||||
...
|
||||
def get_target(self) -> Union[str, None]: ...
|
||||
|
||||
@overload
|
||||
def get_target(self, default: T) -> Union[str, T]:
|
||||
...
|
||||
def get_target(self, default: T) -> Union[str, T]: ...
|
||||
|
||||
def get_target(self, default: Optional[T] = None) -> Optional[Union[str, T]]:
|
||||
return self.state.get(REJECT_TARGET, default)
|
||||
@@ -773,8 +856,7 @@ class Matcher(metaclass=MatcherMeta):
|
||||
temp=True,
|
||||
priority=0,
|
||||
block=True,
|
||||
plugin=self.plugin,
|
||||
module=self.module,
|
||||
source=self.__class__._source,
|
||||
expire_time=bot.config.session_expire_timeout,
|
||||
default_state=self.state,
|
||||
default_type_updater=self.__class__._default_type_updater,
|
||||
@@ -794,8 +876,7 @@ class Matcher(metaclass=MatcherMeta):
|
||||
temp=True,
|
||||
priority=0,
|
||||
block=True,
|
||||
plugin=self.plugin,
|
||||
module=self.module,
|
||||
source=self.__class__._source,
|
||||
expire_time=bot.config.session_expire_timeout,
|
||||
default_state=self.state,
|
||||
default_type_updater=self.__class__._default_type_updater,
|
||||
|
@@ -1,15 +1,25 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
from typing_extensions import Annotated
|
||||
from contextlib import AsyncExitStack, contextmanager, asynccontextmanager
|
||||
from typing import TYPE_CHECKING, Any, Type, Tuple, Literal, Callable, Optional, cast
|
||||
from typing_extensions import Self, Annotated, get_args, override, get_origin
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Type,
|
||||
Tuple,
|
||||
Union,
|
||||
Literal,
|
||||
Callable,
|
||||
Optional,
|
||||
cast,
|
||||
)
|
||||
|
||||
from pydantic.typing import get_args, get_origin
|
||||
from pydantic.fields import Required, Undefined, ModelField
|
||||
from pydantic.fields import FieldInfo as PydanticFieldInfo
|
||||
|
||||
from nonebot.dependencies import Param, Dependent
|
||||
from nonebot.dependencies.utils import check_field_type
|
||||
from nonebot.dependencies import Param, Dependent, CustomConfig
|
||||
from nonebot.typing import T_State, T_Handler, T_DependencyCache
|
||||
from nonebot.compat import FieldInfo, ModelField, PydanticUndefined, extract_field_info
|
||||
from nonebot.utils import (
|
||||
get_name,
|
||||
run_sync,
|
||||
@@ -31,26 +41,31 @@ class DependsInner:
|
||||
dependency: Optional[T_Handler] = None,
|
||||
*,
|
||||
use_cache: bool = True,
|
||||
validate: Union[bool, PydanticFieldInfo] = False,
|
||||
) -> None:
|
||||
self.dependency = dependency
|
||||
self.use_cache = use_cache
|
||||
self.validate = validate
|
||||
|
||||
def __repr__(self) -> str:
|
||||
dep = get_name(self.dependency)
|
||||
cache = "" if self.use_cache else ", use_cache=False"
|
||||
return f"DependsInner({dep}{cache})"
|
||||
validate = f", validate={self.validate}" if self.validate else ""
|
||||
return f"DependsInner({dep}{cache}{validate})"
|
||||
|
||||
|
||||
def Depends(
|
||||
dependency: Optional[T_Handler] = None,
|
||||
*,
|
||||
use_cache: bool = True,
|
||||
validate: Union[bool, PydanticFieldInfo] = False,
|
||||
) -> Any:
|
||||
"""子依赖装饰器
|
||||
|
||||
参数:
|
||||
dependency: 依赖函数。默认为参数的类型注释。
|
||||
use_cache: 是否使用缓存。默认为 `True`。
|
||||
validate: 是否使用 Pydantic 类型校验。默认为 `False`。
|
||||
|
||||
用法:
|
||||
```python
|
||||
@@ -70,7 +85,7 @@ def Depends(
|
||||
...
|
||||
```
|
||||
"""
|
||||
return DependsInner(dependency, use_cache=use_cache)
|
||||
return DependsInner(dependency, use_cache=use_cache, validate=validate)
|
||||
|
||||
|
||||
class DependParam(Param):
|
||||
@@ -81,27 +96,56 @@ class DependParam(Param):
|
||||
本注入应该具有最高优先级,因此应该在其他参数之前检查。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, dependent: Dependent, use_cache: bool, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.dependent = dependent
|
||||
self.use_cache = use_cache
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"Depends({self.extra['dependent']})"
|
||||
return f"Depends({self.dependent}, use_cache={self.use_cache})"
|
||||
|
||||
@classmethod
|
||||
def _from_field(
|
||||
cls,
|
||||
sub_dependent: Dependent,
|
||||
use_cache: bool,
|
||||
validate: Union[bool, PydanticFieldInfo],
|
||||
) -> Self:
|
||||
kwargs = {}
|
||||
if isinstance(validate, PydanticFieldInfo):
|
||||
kwargs.update(extract_field_info(validate))
|
||||
|
||||
kwargs["validate"] = bool(validate)
|
||||
kwargs["dependent"] = sub_dependent
|
||||
kwargs["use_cache"] = use_cache
|
||||
|
||||
return cls(**kwargs)
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["DependParam"]:
|
||||
) -> Optional[Self]:
|
||||
type_annotation, depends_inner = param.annotation, None
|
||||
# extract type annotation and dependency from Annotated
|
||||
if get_origin(param.annotation) is Annotated:
|
||||
type_annotation, *extra_args = get_args(param.annotation)
|
||||
depends_inner = next(
|
||||
(x for x in extra_args if isinstance(x, DependsInner)), None
|
||||
(x for x in reversed(extra_args) if isinstance(x, DependsInner)), None
|
||||
)
|
||||
|
||||
# param default value takes higher priority
|
||||
depends_inner = (
|
||||
param.default if isinstance(param.default, DependsInner) else depends_inner
|
||||
)
|
||||
# not a dependent
|
||||
if depends_inner is None:
|
||||
return
|
||||
|
||||
dependency: T_Handler
|
||||
# sub dependency is not specified, use type annotation
|
||||
if depends_inner.dependency is None:
|
||||
assert (
|
||||
type_annotation is not inspect.Signature.empty
|
||||
@@ -109,13 +153,18 @@ class DependParam(Param):
|
||||
dependency = type_annotation
|
||||
else:
|
||||
dependency = depends_inner.dependency
|
||||
# parse sub dependency
|
||||
sub_dependent = Dependent[Any].parse(
|
||||
call=dependency,
|
||||
allow_types=allow_types,
|
||||
)
|
||||
return cls(Required, use_cache=depends_inner.use_cache, dependent=sub_dependent)
|
||||
|
||||
return cls._from_field(
|
||||
sub_dependent, depends_inner.use_cache, depends_inner.validate
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_parameterless(
|
||||
cls, value: Any, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["Param"]:
|
||||
@@ -124,18 +173,19 @@ class DependParam(Param):
|
||||
dependent = Dependent[Any].parse(
|
||||
call=value.dependency, allow_types=allow_types
|
||||
)
|
||||
return cls(Required, use_cache=value.use_cache, dependent=dependent)
|
||||
return cls._from_field(dependent, value.use_cache, value.validate)
|
||||
|
||||
@override
|
||||
async def _solve(
|
||||
self,
|
||||
stack: Optional[AsyncExitStack] = None,
|
||||
dependency_cache: Optional[T_DependencyCache] = None,
|
||||
**kwargs: Any,
|
||||
) -> Any:
|
||||
use_cache: bool = self.extra["use_cache"]
|
||||
use_cache: bool = self.use_cache
|
||||
dependency_cache = {} if dependency_cache is None else dependency_cache
|
||||
|
||||
sub_dependent: Dependent = self.extra["dependent"]
|
||||
sub_dependent: Dependent = self.dependent
|
||||
call = cast(Callable[..., Any], sub_dependent.call)
|
||||
|
||||
# solve sub dependency with current cache
|
||||
@@ -169,10 +219,10 @@ class DependParam(Param):
|
||||
dependency_cache[call] = task
|
||||
return await task
|
||||
|
||||
@override
|
||||
async def _check(self, **kwargs: Any) -> None:
|
||||
# run sub dependent pre-checkers
|
||||
sub_dependent: Dependent = self.extra["dependent"]
|
||||
await sub_dependent.check(**kwargs)
|
||||
await self.dependent.check(**kwargs)
|
||||
|
||||
|
||||
class BotParam(Param):
|
||||
@@ -183,46 +233,46 @@ class BotParam(Param):
|
||||
为保证兼容性,本注入还会解析名为 `bot` 且没有类型注解的参数。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, checker: Optional[ModelField] = None, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.checker = checker
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
"BotParam("
|
||||
+ (
|
||||
repr(cast(ModelField, checker).type_)
|
||||
if (checker := self.extra.get("checker"))
|
||||
else ""
|
||||
)
|
||||
+ (repr(self.checker.annotation) if self.checker is not None else "")
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["BotParam"]:
|
||||
) -> Optional[Self]:
|
||||
from nonebot.adapters import Bot
|
||||
|
||||
# param type is Bot(s) or subclass(es) of Bot or None
|
||||
if generic_check_issubclass(param.annotation, Bot):
|
||||
checker: Optional[ModelField] = None
|
||||
if param.annotation is not Bot:
|
||||
checker = ModelField(
|
||||
name=param.name,
|
||||
type_=param.annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None,
|
||||
required=True,
|
||||
checker = ModelField.construct(
|
||||
name=param.name, annotation=param.annotation, field_info=FieldInfo()
|
||||
)
|
||||
return cls(Required, checker=checker)
|
||||
return cls(checker=checker)
|
||||
# legacy: param is named "bot" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "bot":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, bot: "Bot", **kwargs: Any) -> Any:
|
||||
return bot
|
||||
|
||||
@override
|
||||
async def _check(self, bot: "Bot", **kwargs: Any) -> None:
|
||||
if checker := self.extra.get("checker"):
|
||||
check_field_type(checker, bot)
|
||||
if self.checker is not None:
|
||||
check_field_type(self.checker, bot)
|
||||
|
||||
|
||||
class EventParam(Param):
|
||||
@@ -233,46 +283,46 @@ class EventParam(Param):
|
||||
为保证兼容性,本注入还会解析名为 `event` 且没有类型注解的参数。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, checker: Optional[ModelField] = None, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.checker = checker
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
"EventParam("
|
||||
+ (
|
||||
repr(cast(ModelField, checker).type_)
|
||||
if (checker := self.extra.get("checker"))
|
||||
else ""
|
||||
)
|
||||
+ (repr(self.checker.annotation) if self.checker is not None else "")
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["EventParam"]:
|
||||
) -> Optional[Self]:
|
||||
from nonebot.adapters import Event
|
||||
|
||||
# param type is Event(s) or subclass(es) of Event or None
|
||||
if generic_check_issubclass(param.annotation, Event):
|
||||
checker: Optional[ModelField] = None
|
||||
if param.annotation is not Event:
|
||||
checker = ModelField(
|
||||
name=param.name,
|
||||
type_=param.annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None,
|
||||
required=True,
|
||||
checker = ModelField.construct(
|
||||
name=param.name, annotation=param.annotation, field_info=FieldInfo()
|
||||
)
|
||||
return cls(Required, checker=checker)
|
||||
return cls(checker=checker)
|
||||
# legacy: param is named "event" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "event":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, event: "Event", **kwargs: Any) -> Any:
|
||||
return event
|
||||
|
||||
@override
|
||||
async def _check(self, event: "Event", **kwargs: Any) -> Any:
|
||||
if checker := self.extra.get("checker", None):
|
||||
check_field_type(checker, event)
|
||||
if self.checker is not None:
|
||||
check_field_type(self.checker, event)
|
||||
|
||||
|
||||
class StateParam(Param):
|
||||
@@ -287,16 +337,18 @@ class StateParam(Param):
|
||||
return "StateParam()"
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["StateParam"]:
|
||||
) -> Optional[Self]:
|
||||
# param type is T_State
|
||||
if param.annotation is T_State:
|
||||
return cls(Required)
|
||||
return cls()
|
||||
# legacy: param is named "state" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "state":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, state: T_State, **kwargs: Any) -> Any:
|
||||
return state
|
||||
|
||||
@@ -309,46 +361,54 @@ class MatcherParam(Param):
|
||||
为保证兼容性,本注入还会解析名为 `matcher` 且没有类型注解的参数。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, checker: Optional[ModelField] = None, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.checker = checker
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "MatcherParam()"
|
||||
return (
|
||||
"MatcherParam("
|
||||
+ (repr(self.checker.annotation) if self.checker is not None else "")
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["MatcherParam"]:
|
||||
) -> Optional[Self]:
|
||||
from nonebot.matcher import Matcher
|
||||
|
||||
# param type is Matcher(s) or subclass(es) of Matcher or None
|
||||
if generic_check_issubclass(param.annotation, Matcher):
|
||||
checker: Optional[ModelField] = None
|
||||
if param.annotation is not Matcher:
|
||||
checker = ModelField(
|
||||
name=param.name,
|
||||
type_=param.annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None,
|
||||
required=True,
|
||||
checker = ModelField.construct(
|
||||
name=param.name, annotation=param.annotation, field_info=FieldInfo()
|
||||
)
|
||||
return cls(Required, checker=checker)
|
||||
return cls(checker=checker)
|
||||
# legacy: param is named "matcher" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "matcher":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, matcher: "Matcher", **kwargs: Any) -> Any:
|
||||
return matcher
|
||||
|
||||
@override
|
||||
async def _check(self, matcher: "Matcher", **kwargs: Any) -> Any:
|
||||
if checker := self.extra.get("checker", None):
|
||||
check_field_type(checker, matcher)
|
||||
if self.checker is not None:
|
||||
check_field_type(self.checker, matcher)
|
||||
|
||||
|
||||
class ArgInner:
|
||||
def __init__(
|
||||
self, key: Optional[str], type: Literal["message", "str", "plaintext"]
|
||||
) -> None:
|
||||
self.key = key
|
||||
self.type = type
|
||||
self.key: Optional[str] = key
|
||||
self.type: Literal["message", "str", "plaintext"] = type
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"ArgInner(key={self.key!r}, type={self.type!r})"
|
||||
@@ -378,30 +438,39 @@ class ArgParam(Param):
|
||||
留空则会根据参数名称获取。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
key: str,
|
||||
type: Literal["message", "str", "plaintext"],
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.key = key
|
||||
self.type = type
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"ArgParam(key={self.extra['key']!r}, type={self.extra['type']!r})"
|
||||
return f"ArgParam(key={self.key!r}, type={self.type!r})"
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["ArgParam"]:
|
||||
) -> Optional[Self]:
|
||||
if isinstance(param.default, ArgInner):
|
||||
return cls(
|
||||
Required, key=param.default.key or param.name, type=param.default.type
|
||||
)
|
||||
return cls(key=param.default.key or param.name, type=param.default.type)
|
||||
elif get_origin(param.annotation) is Annotated:
|
||||
for arg in get_args(param.annotation):
|
||||
for arg in get_args(param.annotation)[:0:-1]:
|
||||
if isinstance(arg, ArgInner):
|
||||
return cls(Required, key=arg.key or param.name, type=arg.type)
|
||||
return cls(key=arg.key or param.name, type=arg.type)
|
||||
|
||||
async def _solve(self, matcher: "Matcher", **kwargs: Any) -> Any:
|
||||
key: str = self.extra["key"]
|
||||
message = matcher.get_arg(key)
|
||||
message = matcher.get_arg(self.key)
|
||||
if message is None:
|
||||
return message
|
||||
if self.extra["type"] == "message":
|
||||
if self.type == "message":
|
||||
return message
|
||||
elif self.extra["type"] == "str":
|
||||
elif self.type == "str":
|
||||
return str(message)
|
||||
else:
|
||||
return message.extract_plain_text()
|
||||
@@ -419,16 +488,18 @@ class ExceptionParam(Param):
|
||||
return "ExceptionParam()"
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["ExceptionParam"]:
|
||||
) -> Optional[Self]:
|
||||
# param type is Exception(s) or subclass(es) of Exception or None
|
||||
if generic_check_issubclass(param.annotation, Exception):
|
||||
return cls(Required)
|
||||
return cls()
|
||||
# legacy: param is named "exception" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "exception":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, exception: Optional[Exception] = None, **kwargs: Any) -> Any:
|
||||
return exception
|
||||
|
||||
@@ -445,14 +516,16 @@ class DefaultParam(Param):
|
||||
return f"DefaultParam(default={self.default!r})"
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
def _check_param(
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional["DefaultParam"]:
|
||||
) -> Optional[Self]:
|
||||
if param.default != param.empty:
|
||||
return cls(param.default)
|
||||
return cls(default=param.default)
|
||||
|
||||
@override
|
||||
async def _solve(self, **kwargs: Any) -> Any:
|
||||
return Undefined
|
||||
return PydanticUndefined
|
||||
|
||||
|
||||
__autodoc__ = {
|
||||
|
@@ -39,11 +39,13 @@ class Permission:
|
||||
|
||||
def __init__(self, *checkers: Union[T_PermissionChecker, Dependent[bool]]) -> None:
|
||||
self.checkers: Set[Dependent[bool]] = {
|
||||
(
|
||||
checker
|
||||
if isinstance(checker, Dependent)
|
||||
else Dependent[bool].parse(
|
||||
call=checker, allow_types=self.HANDLER_PARAM_TYPES
|
||||
)
|
||||
)
|
||||
for checker in checkers
|
||||
}
|
||||
"""存储 `PermissionChecker`"""
|
||||
|
@@ -38,11 +38,13 @@ class Rule:
|
||||
|
||||
def __init__(self, *checkers: Union[T_RuleChecker, Dependent[bool]]) -> None:
|
||||
self.checkers: Set[Dependent[bool]] = {
|
||||
(
|
||||
checker
|
||||
if isinstance(checker, Dependent)
|
||||
else Dependent[bool].parse(
|
||||
call=checker, allow_types=self.HANDLER_PARAM_TYPES
|
||||
)
|
||||
)
|
||||
for checker in checkers
|
||||
}
|
||||
"""存储 `RuleChecker`"""
|
||||
|
@@ -8,6 +8,7 @@ FrontMatter:
from nonebot.internal.matcher import Matcher as Matcher
from nonebot.internal.matcher import matchers as matchers
from nonebot.internal.matcher import current_bot as current_bot
from nonebot.internal.matcher import MatcherSource as MatcherSource
from nonebot.internal.matcher import current_event as current_event
from nonebot.internal.matcher import MatcherManager as MatcherManager
from nonebot.internal.matcher import MatcherProvider as MatcherProvider
@@ -358,9 +358,18 @@ async def _check_matcher(
        return False

    try:
        if not await Matcher.check_perm(
            bot, event, stack, dependency_cache
        ) or not await Matcher.check_rule(bot, event, state, stack, dependency_cache):
        if not await Matcher.check_perm(bot, event, stack, dependency_cache):
            logger.trace(f"Permission conditions not met for {Matcher}")
            return False
    except Exception as e:
        logger.opt(colors=True, exception=e).error(
            f"<r><bg #f8bbd0>Permission check failed for {Matcher}.</bg #f8bbd0></r>"
        )
        return False

    try:
        if not await Matcher.check_rule(bot, event, state, stack, dependency_cache):
            logger.trace(f"Rule conditions not met for {Matcher}")
            return False
    except Exception as e:
        logger.opt(colors=True, exception=e).error(
@@ -5,7 +5,18 @@ FrontMatter:
|
||||
description: nonebot.params 模块
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List, Match, Tuple, Union, Optional
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Match,
|
||||
Tuple,
|
||||
Union,
|
||||
Literal,
|
||||
Callable,
|
||||
Optional,
|
||||
overload,
|
||||
)
|
||||
|
||||
from nonebot.typing import T_State
|
||||
from nonebot.matcher import Matcher
|
||||
@@ -147,13 +158,34 @@ def RegexMatched() -> Match[str]:
    return Depends(_regex_matched, use_cache=False)


def _regex_str(state: T_State) -> str:
    return _regex_matched(state).group()
def _regex_str(
    groups: Tuple[Union[str, int], ...]
) -> Callable[[T_State], Union[str, Tuple[Union[str, Any], ...], Any]]:
    def _regex_str_dependency(
        state: T_State,
    ) -> Union[str, Tuple[Union[str, Any], ...], Any]:
        return _regex_matched(state).group(*groups)

    return _regex_str_dependency


def RegexStr() -> str:
@overload
def RegexStr(__group: Literal[0] = 0) -> str: ...


@overload
def RegexStr(__group: Union[str, int]) -> Union[str, Any]: ...


@overload
def RegexStr(
    __group1: Union[str, int], __group2: Union[str, int], *groups: Union[str, int]
) -> Tuple[Union[str, Any], ...]: ...


def RegexStr(*groups: Union[str, int]) -> Union[str, Tuple[Union[str, Any], ...], Any]:
    """正则匹配结果文本"""
    return Depends(_regex_str, use_cache=False)
    return Depends(_regex_str(groups), use_cache=False)


def _regex_group(state: T_State) -> Tuple[Any, ...]:
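An illustrative sketch of how the group-aware `RegexStr` can be consumed in a handler. The command pattern and handler are placeholders chosen for the example.

```python
# Illustrative sketch only; pattern and matcher name are assumptions.
from nonebot import on_regex
from nonebot.params import RegexStr

weather = on_regex(r"^weather (?P<city>\w+) (?P<day>\w+)$")  # hypothetical pattern


@weather.handle()
async def handle_weather(
    full: str = RegexStr(),                 # whole match, as before
    city: str = RegexStr("city"),           # one group -> that group's text
    both: tuple = RegexStr("city", "day"),  # several groups -> a tuple
):
    await weather.send(f"{city!r} from {both!r} (matched {full!r})")
```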
@@ -29,7 +29,7 @@
|
||||
- `load_builtin_plugins` =>
|
||||
{ref}``load_builtin_plugins` <nonebot.plugin.load.load_builtin_plugins>`
|
||||
- `require` => {ref}``require` <nonebot.plugin.load.require>`
|
||||
- `PluginMetadata` => {ref}``PluginMetadata` <nonebot.plugin.plugin.PluginMetadata>`
|
||||
- `PluginMetadata` => {ref}``PluginMetadata` <nonebot.plugin.model.PluginMetadata>`
|
||||
|
||||
FrontMatter:
|
||||
sidebar_position: 0
|
||||
@@ -39,7 +39,14 @@ FrontMatter:
|
||||
from itertools import chain
|
||||
from types import ModuleType
|
||||
from contextvars import ContextVar
|
||||
from typing import Set, Dict, List, Tuple, Optional
|
||||
from typing import Set, Dict, List, Type, Tuple, TypeVar, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot import get_driver
|
||||
from nonebot.compat import model_dump, type_validate_python
|
||||
|
||||
C = TypeVar("C", bound=BaseModel)
|
||||
|
||||
_plugins: Dict[str, "Plugin"] = {}
|
||||
_managers: List["PluginManager"] = []
|
||||
@@ -77,7 +84,7 @@ def get_plugin(name: str) -> Optional["Plugin"]:
|
||||
如果为 `load_plugins` 文件夹导入的插件,则为文件(夹)名。
|
||||
|
||||
参数:
|
||||
name: 插件名,即 {ref}`nonebot.plugin.plugin.Plugin.name`。
|
||||
name: 插件名,即 {ref}`nonebot.plugin.model.Plugin.name`。
|
||||
"""
|
||||
return _plugins.get(name)
|
||||
|
||||
@@ -88,7 +95,7 @@ def get_plugin_by_module_name(module_name: str) -> Optional["Plugin"]:
|
||||
如果提供的模块名为某个插件的子模块,同样会返回该插件。
|
||||
|
||||
参数:
|
||||
module_name: 模块名,即 {ref}`nonebot.plugin.plugin.Plugin.module_name`。
|
||||
module_name: 模块名,即 {ref}`nonebot.plugin.model.Plugin.module_name`。
|
||||
"""
|
||||
loaded = {plugin.module_name: plugin for plugin in _plugins.values()}
|
||||
has_parent = True
|
||||
@@ -108,12 +115,17 @@ def get_available_plugin_names() -> Set[str]:
|
||||
return {*chain.from_iterable(manager.available_plugins for manager in _managers)}
|
||||
|
||||
|
||||
def get_plugin_config(config: Type[C]) -> C:
|
||||
"""从全局配置获取当前插件需要的配置项。"""
|
||||
return type_validate_python(config, model_dump(get_driver().config))
|
||||
|
||||
|
||||
from .on import on as on
|
||||
from .manager import PluginManager
|
||||
from .on import on_type as on_type
|
||||
from .model import Plugin as Plugin
|
||||
from .load import require as require
|
||||
from .on import on_regex as on_regex
|
||||
from .plugin import Plugin as Plugin
|
||||
from .on import on_notice as on_notice
|
||||
from .on import on_command as on_command
|
||||
from .on import on_keyword as on_keyword
|
||||
@@ -129,8 +141,8 @@ from .load import load_plugins as load_plugins
|
||||
from .on import on_startswith as on_startswith
|
||||
from .load import load_from_json as load_from_json
|
||||
from .load import load_from_toml as load_from_toml
|
||||
from .model import PluginMetadata as PluginMetadata
|
||||
from .on import on_shell_command as on_shell_command
|
||||
from .plugin import PluginMetadata as PluginMetadata
|
||||
from .load import load_all_plugins as load_all_plugins
|
||||
from .load import load_builtin_plugin as load_builtin_plugin
|
||||
from .load import load_builtin_plugins as load_builtin_plugins
|
||||
|
@@ -12,7 +12,7 @@ from typing import Set, Union, Iterable, Optional
|
||||
|
||||
from nonebot.utils import path_to_module_name
|
||||
|
||||
from .plugin import Plugin
|
||||
from .model import Plugin
|
||||
from .manager import PluginManager
|
||||
from . import _managers, get_plugin, _current_plugin_chain, _module_name_to_plugin_name
|
||||
|
||||
@@ -160,7 +160,7 @@ def require(name: str) -> ModuleType:
|
||||
如果为 `load_plugins` 文件夹导入的插件,则为文件(夹)名。
|
||||
|
||||
参数:
|
||||
name: 插件名,即 {ref}`nonebot.plugin.plugin.Plugin.name`。
|
||||
name: 插件名,即 {ref}`nonebot.plugin.model.Plugin.name`。
|
||||
|
||||
异常:
|
||||
RuntimeError: 插件无法加载
|
||||
@@ -213,8 +213,10 @@ def inherit_supported_adapters(*names: str) -> Optional[Set[str]]:
|
||||
)
|
||||
|
||||
return final_supported and {
|
||||
(
|
||||
f"nonebot.adapters.{adapter_name[1:]}"
|
||||
if adapter_name.startswith("~")
|
||||
else adapter_name
|
||||
)
|
||||
for adapter_name in final_supported
|
||||
}
|
||||
|
@@ -20,7 +20,7 @@ from typing import Set, Dict, List, Iterable, Optional, Sequence
|
||||
from nonebot.log import logger
|
||||
from nonebot.utils import escape_tag, path_to_module_name
|
||||
|
||||
from .plugin import Plugin, PluginMetadata
|
||||
from .model import Plugin, PluginMetadata
|
||||
from . import (
|
||||
_managers,
|
||||
_new_plugin,
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
FrontMatter:
|
||||
sidebar_position: 3
|
||||
description: nonebot.plugin.plugin 模块
|
||||
description: nonebot.plugin.model 模块
|
||||
"""
|
||||
|
||||
import contextlib
|
@@ -7,14 +7,15 @@ FrontMatter:
|
||||
|
||||
import re
|
||||
import inspect
|
||||
import warnings
|
||||
from types import ModuleType
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Set, Dict, List, Type, Tuple, Union, Optional
|
||||
|
||||
from nonebot.adapters import Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot.permission import Permission
|
||||
from nonebot.dependencies import Dependent
|
||||
from nonebot.matcher import Matcher, MatcherSource
|
||||
from nonebot.typing import T_State, T_Handler, T_RuleChecker, T_PermissionChecker
|
||||
from nonebot.rule import (
|
||||
Rule,
|
||||
@@ -29,7 +30,7 @@ from nonebot.rule import (
|
||||
shell_command,
|
||||
)
|
||||
|
||||
from .plugin import Plugin
|
||||
from .model import Plugin
|
||||
from . import get_plugin_by_module_name
|
||||
from .manager import _current_plugin_chain
|
||||
|
||||
@@ -45,25 +46,39 @@ def store_matcher(matcher: Type[Matcher]) -> None:
|
||||
plugin_chain[-1].matcher.add(matcher)
|
||||
|
||||
|
||||
def get_matcher_plugin(depth: int = 1) -> Optional[Plugin]:
|
||||
def get_matcher_plugin(depth: int = 1) -> Optional[Plugin]: # pragma: no cover
|
||||
"""获取事件响应器定义所在插件。
|
||||
|
||||
**Deprecated**, 请使用 {ref}`nonebot.plugin.on.get_matcher_source` 获取信息。
|
||||
|
||||
参数:
|
||||
depth: 调用栈深度
|
||||
"""
|
||||
# matcher defined when plugin loading
|
||||
if plugin_chain := _current_plugin_chain.get():
|
||||
return plugin_chain[-1]
|
||||
|
||||
# matcher defined when plugin running
|
||||
if module := get_matcher_module(depth + 1):
|
||||
if plugin := get_plugin_by_module_name(module.__name__):
|
||||
return plugin
|
||||
warnings.warn(
|
||||
"`get_matcher_plugin` is deprecated, please use `get_matcher_source` instead",
|
||||
DeprecationWarning,
|
||||
)
|
||||
return (source := get_matcher_source(depth + 1)) and source.plugin
|
||||
|
||||
|
||||
def get_matcher_module(depth: int = 1) -> Optional[ModuleType]:
|
||||
def get_matcher_module(depth: int = 1) -> Optional[ModuleType]: # pragma: no cover
|
||||
"""获取事件响应器定义所在模块。
|
||||
|
||||
**Deprecated**, 请使用 {ref}`nonebot.plugin.on.get_matcher_source` 获取信息。
|
||||
|
||||
参数:
|
||||
depth: 调用栈深度
|
||||
"""
|
||||
warnings.warn(
|
||||
"`get_matcher_module` is deprecated, please use `get_matcher_source` instead",
|
||||
DeprecationWarning,
|
||||
)
|
||||
return (source := get_matcher_source(depth + 1)) and source.module
|
||||
|
||||
|
||||
def get_matcher_source(depth: int = 1) -> Optional[MatcherSource]:
|
||||
"""获取事件响应器定义所在源码信息。
|
||||
|
||||
参数:
|
||||
depth: 调用栈深度
|
||||
"""
|
||||
@@ -71,7 +86,22 @@ def get_matcher_module(depth: int = 1) -> Optional[ModuleType]:
|
||||
if current_frame is None:
|
||||
return None
|
||||
frame = inspect.getouterframes(current_frame)[depth + 1].frame
|
||||
return inspect.getmodule(frame)
|
||||
|
||||
module_name = (module := inspect.getmodule(frame)) and module.__name__
|
||||
|
||||
plugin: Optional["Plugin"] = None
|
||||
# matcher defined when plugin loading
|
||||
if plugin_chain := _current_plugin_chain.get():
|
||||
plugin = plugin_chain[-1]
|
||||
# matcher defined when plugin running
|
||||
elif module_name:
|
||||
plugin = get_plugin_by_module_name(module_name)
|
||||
|
||||
return MatcherSource(
|
||||
plugin_name=plugin and plugin.name,
|
||||
module_name=module_name,
|
||||
lineno=frame.f_lineno,
|
||||
)
|
||||
|
||||
|
||||
def on(
|
||||
@@ -109,8 +139,7 @@ def on(
|
||||
priority=priority,
|
||||
block=block,
|
||||
handlers=handlers,
|
||||
plugin=get_matcher_plugin(_depth + 1),
|
||||
module=get_matcher_module(_depth + 1),
|
||||
source=get_matcher_source(_depth + 1),
|
||||
default_state=state,
|
||||
)
|
||||
store_matcher(matcher)
|
||||
|
@@ -4,17 +4,18 @@ from types import ModuleType
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from nonebot.adapters import Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot.permission import Permission
|
||||
from nonebot.dependencies import Dependent
|
||||
from nonebot.rule import Rule, ArgumentParser
|
||||
from nonebot.matcher import Matcher, MatcherSource
|
||||
from nonebot.typing import T_State, T_Handler, T_RuleChecker, T_PermissionChecker
|
||||
|
||||
from .plugin import Plugin
|
||||
from .model import Plugin
|
||||
|
||||
def store_matcher(matcher: type[Matcher]) -> None: ...
|
||||
def get_matcher_plugin(depth: int = ...) -> Plugin | None: ...
|
||||
def get_matcher_module(depth: int = ...) -> ModuleType | None: ...
|
||||
def get_matcher_source(depth: int = ...) -> MatcherSource | None: ...
|
||||
def on(
|
||||
type: str = "",
|
||||
rule: Rule | T_RuleChecker | None = ...,
|
||||
|
@@ -19,4 +19,5 @@ echo = on_command("echo", to_me())

@echo.handle()
async def handle_echo(message: Message = CommandArg()):
    if any((not seg.is_text()) or str(seg) for seg in message):
        await echo.send(message=message)
@@ -117,6 +117,11 @@ class TrieRule:
                # check whitespace
                arg_str = segment_text[len(pf.key) :]
                arg_str_stripped = arg_str.lstrip()
                # check next segment until arg detected or no text remain
                while not arg_str_stripped and msg and msg[0].is_text():
                    arg_str += str(msg.pop(0))
                    arg_str_stripped = arg_str.lstrip()

                has_arg = arg_str_stripped or msg
                if (
                    has_arg
@@ -455,45 +460,38 @@ class ArgumentParser(ArgParser):
|
||||
self,
|
||||
args: Optional[Sequence[Union[str, MessageSegment]]] = None,
|
||||
namespace: None = None,
|
||||
) -> Tuple[Namespace, List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[Namespace, List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
@overload
|
||||
def parse_known_args(
|
||||
self, args: Optional[Sequence[Union[str, MessageSegment]]], namespace: T
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
@overload
|
||||
def parse_known_args(
|
||||
self, *, namespace: T
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
def parse_known_args(
|
||||
self,
|
||||
args: Optional[Sequence[Union[str, MessageSegment]]] = None,
|
||||
namespace: Optional[T] = None,
|
||||
) -> Tuple[Union[Namespace, T], List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[Union[Namespace, T], List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
@overload
|
||||
def parse_args(
|
||||
self,
|
||||
args: Optional[Sequence[Union[str, MessageSegment]]] = None,
|
||||
namespace: None = None,
|
||||
) -> Namespace:
|
||||
...
|
||||
) -> Namespace: ...
|
||||
|
||||
@overload
|
||||
def parse_args(
|
||||
self, args: Optional[Sequence[Union[str, MessageSegment]]], namespace: T
|
||||
) -> T:
|
||||
...
|
||||
) -> T: ...
|
||||
|
||||
@overload
|
||||
def parse_args(self, *, namespace: T) -> T:
|
||||
...
|
||||
def parse_args(self, *, namespace: T) -> T: ...
|
||||
|
||||
def parse_args(
|
||||
self,
|
||||
@@ -599,7 +597,7 @@ def shell_command(
|
||||
通过 {ref}`nonebot.params.ShellCommandArgs` 获取解析后的参数字典
|
||||
(例: `{"arg": "arg", "h": True}`)。
|
||||
|
||||
:::warning 警告
|
||||
:::caution 警告
|
||||
如果参数解析失败,则通过 {ref}`nonebot.params.ShellCommandArgs`
|
||||
获取的将是 {ref}`nonebot.exception.ParserExit` 异常。
|
||||
:::
|
||||
|
@@ -10,18 +10,14 @@ FrontMatter:
|
||||
description: nonebot.typing 模块
|
||||
"""
|
||||
|
||||
import sys
|
||||
import types
|
||||
import warnings
|
||||
from typing_extensions import ParamSpec, TypeAlias, override
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Dict,
|
||||
Union,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Awaitable,
|
||||
)
|
||||
import contextlib
|
||||
import typing as t
|
||||
import typing_extensions as t_ext
|
||||
from typing import TYPE_CHECKING, TypeVar
|
||||
from typing_extensions import ParamSpec, TypeAlias, get_args, override, get_origin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from asyncio import Task
|
||||
@@ -32,7 +28,7 @@ if TYPE_CHECKING:
|
||||
T = TypeVar("T")
|
||||
P = ParamSpec("P")
|
||||
|
||||
T_Wrapped: TypeAlias = Callable[P, T]
|
||||
T_Wrapped: TypeAlias = t.Callable[P, T]
|
||||
|
||||
|
||||
def overrides(InterfaceClass: object):
|
||||
@@ -47,14 +43,77 @@ def overrides(InterfaceClass: object):
|
||||
return override
|
||||
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
|
||||
def origin_is_union(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
"""判断是否是 Union 类型"""
|
||||
return origin is t.Union
|
||||
|
||||
else:
|
||||
|
||||
def origin_is_union(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
return origin is t.Union or origin is types.UnionType
|
||||
|
||||
|
||||
def origin_is_literal(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
"""判断是否是 Literal 类型"""
|
||||
return origin is t.Literal or origin is t_ext.Literal
|
||||
|
||||
|
||||
def _literal_values(type_: t.Type[t.Any]) -> t.Tuple[t.Any, ...]:
|
||||
return get_args(type_)
|
||||
|
||||
|
||||
def all_literal_values(type_: t.Type[t.Any]) -> t.List[t.Any]:
|
||||
"""获取 Literal 类型包含的所有值"""
|
||||
if not origin_is_literal(get_origin(type_)):
|
||||
return [type_]
|
||||
|
||||
return [x for value in _literal_values(type_) for x in all_literal_values(value)]
|
||||
|
||||
|
||||
def origin_is_annotated(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
"""判断是否是 Annotated 类型"""
|
||||
with contextlib.suppress(TypeError):
|
||||
return origin is not None and issubclass(origin, t_ext.Annotated)
|
||||
return False
|
||||
|
||||
|
||||
NONE_TYPES = {None, type(None), t.Literal[None], t_ext.Literal[None]}
|
||||
if sys.version_info >= (3, 10):
|
||||
NONE_TYPES.add(types.NoneType)
|
||||
|
||||
|
||||
def is_none_type(type_: t.Type[t.Any]) -> bool:
|
||||
"""判断是否是 None 类型"""
|
||||
return type_ in NONE_TYPES
|
||||
|
||||
|
||||
if sys.version_info < (3, 9): # pragma: py-lt-39
|
||||
|
||||
def evaluate_forwardref(
|
||||
ref: t.ForwardRef, globalns: t.Dict[str, t.Any], localns: t.Dict[str, t.Any]
|
||||
) -> t.Any:
|
||||
return ref._evaluate(globalns, localns)
|
||||
|
||||
else: # pragma: py-gte-39
|
||||
|
||||
def evaluate_forwardref(
|
||||
ref: t.ForwardRef, globalns: t.Dict[str, t.Any], localns: t.Dict[str, t.Any]
|
||||
) -> t.Any:
|
||||
return ref._evaluate(globalns, localns, frozenset())
|
||||
|
||||
|
||||
# state
|
||||
T_State: TypeAlias = Dict[Any, Any]
|
||||
T_State: TypeAlias = t.Dict[t.Any, t.Any]
|
||||
"""事件处理状态 State 类型"""
|
||||
|
||||
_DependentCallable: TypeAlias = Union[Callable[..., T], Callable[..., Awaitable[T]]]
|
||||
_DependentCallable: TypeAlias = t.Union[
|
||||
t.Callable[..., T], t.Callable[..., t.Awaitable[T]]
|
||||
]
|
||||
|
||||
# driver hooks
|
||||
T_BotConnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
T_BotConnectionHook: TypeAlias = _DependentCallable[t.Any]
|
||||
"""Bot 连接建立时钩子函数
|
||||
|
||||
依赖参数:
|
||||
@@ -63,7 +122,7 @@ T_BotConnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
- BotParam: Bot 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_BotDisconnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
T_BotDisconnectionHook: TypeAlias = _DependentCallable[t.Any]
|
||||
"""Bot 连接断开时钩子函数
|
||||
|
||||
依赖参数:
|
||||
@@ -74,15 +133,17 @@ T_BotDisconnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
"""
|
||||
|
||||
# api hooks
|
||||
T_CallingAPIHook: TypeAlias = Callable[["Bot", str, Dict[str, Any]], Awaitable[Any]]
|
||||
T_CallingAPIHook: TypeAlias = t.Callable[
|
||||
["Bot", str, t.Dict[str, t.Any]], t.Awaitable[t.Any]
|
||||
]
|
||||
"""`bot.call_api` 钩子函数"""
|
||||
T_CalledAPIHook: TypeAlias = Callable[
|
||||
["Bot", Optional[Exception], str, Dict[str, Any], Any], Awaitable[Any]
|
||||
T_CalledAPIHook: TypeAlias = t.Callable[
|
||||
["Bot", t.Optional[Exception], str, t.Dict[str, t.Any], t.Any], t.Awaitable[t.Any]
|
||||
]
|
||||
"""`bot.call_api` 后执行的函数,参数分别为 bot, exception, api, data, result"""
|
||||
|
||||
# event hooks
|
||||
T_EventPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
T_EventPreProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件预处理函数 EventPreProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
@@ -93,8 +154,8 @@ T_EventPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
- StateParam: State 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_EventPostProcessor: TypeAlias = _DependentCallable[Any]
|
||||
"""事件预处理函数 EventPostProcessor 类型
|
||||
T_EventPostProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件后处理函数 EventPostProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
|
||||
@@ -106,7 +167,7 @@ T_EventPostProcessor: TypeAlias = _DependentCallable[Any]
|
||||
"""
|
||||
|
||||
# matcher run hooks
|
||||
T_RunPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
T_RunPreProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件响应器运行前预处理函数 RunPreProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
@@ -118,7 +179,7 @@ T_RunPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
- MatcherParam: Matcher 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_RunPostProcessor: TypeAlias = _DependentCallable[Any]
|
||||
T_RunPostProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件响应器运行后后处理函数 RunPostProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
@@ -155,7 +216,7 @@ T_PermissionChecker: TypeAlias = _DependentCallable[bool]
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
|
||||
T_Handler: TypeAlias = _DependentCallable[Any]
|
||||
T_Handler: TypeAlias = _DependentCallable[t.Any]
|
||||
"""Handler 处理函数。"""
|
||||
T_TypeUpdater: TypeAlias = _DependentCallable[str]
|
||||
"""TypeUpdater 在 Matcher.pause, Matcher.reject 时被运行,用于更新响应的事件类型。
|
||||
@@ -183,5 +244,5 @@ T_PermissionUpdater: TypeAlias = _DependentCallable["Permission"]
|
||||
- MatcherParam: Matcher 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_DependencyCache: TypeAlias = Dict[_DependentCallable[Any], "Task[Any]"]
|
||||
T_DependencyCache: TypeAlias = t.Dict[_DependentCallable[t.Any], "Task[t.Any]"]
|
||||
"""依赖缓存, 用于存储依赖函数的返回值"""
|
||||
|
@@ -12,27 +12,38 @@ import inspect
import importlib
import dataclasses
from pathlib import Path
from collections import deque
from contextvars import copy_context
from functools import wraps, partial
from contextlib import asynccontextmanager
from typing_extensions import ParamSpec, get_args, override, get_origin
from typing import (
    Any,
    Dict,
    Type,
    Tuple,
    Union,
    Generic,
    Mapping,
    TypeVar,
    Callable,
    Optional,
    Sequence,
    Coroutine,
    AsyncGenerator,
    ContextManager,
    overload,
)

from pydantic.typing import is_union, is_none_type
from pydantic import BaseModel

from nonebot.log import logger
from nonebot.typing import (
    is_none_type,
    origin_is_union,
    origin_is_literal,
    all_literal_values,
)

P = ParamSpec("P")
R = TypeVar("R")
@@ -52,6 +63,34 @@ def escape_tag(s: str) -> str:
    return re.sub(r"</?((?:[fb]g\s)?[^<>\s]*)>", r"\\\g<0>", s)


def deep_update(
    mapping: Dict[K, Any], *updating_mappings: Dict[K, Any]
) -> Dict[K, Any]:
    """Recursively merge dictionaries into a deep-updated copy"""
    updated_mapping = mapping.copy()
    for updating_mapping in updating_mappings:
        for k, v in updating_mapping.items():
            if (
                k in updated_mapping
                and isinstance(updated_mapping[k], dict)
                and isinstance(v, dict)
            ):
                updated_mapping[k] = deep_update(updated_mapping[k], v)
            else:
                updated_mapping[k] = v
    return updated_mapping
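A quick sketch of what deep_update does with nested mappings (the dictionaries here are invented for illustration):

base = {"log": {"level": "INFO"}, "debug": False}
patch = {"log": {"format": "json"}, "debug": True}
assert deep_update(base, patch) == {
    "log": {"level": "INFO", "format": "json"},
    "debug": True,
}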
def lenient_issubclass(
    cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...]]
) -> bool:
    """Check whether cls is a subclass of one of the types in class_or_tuple, ignoring TypeError."""
    try:
        return isinstance(cls, type) and issubclass(cls, class_or_tuple)
    except TypeError:
        return False


def generic_check_issubclass(
    cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...]]
) -> bool:
@@ -61,6 +100,8 @@ def generic_check_issubclass(

    - If cls is a `typing.Union` or `types.UnionType` type,
      every member type is checked for being a subclass of one of the types in class_or_tuple, or None.
    - If cls is a `typing.Literal` type,
      every value is checked for being an instance of one of the types in class_or_tuple.
    - If cls is a `typing.TypeVar` type,
      its `__bound__` or `__constraints__`
      are checked for being a subclass of one of the types in class_or_tuple, or None.
@@ -69,14 +110,23 @@ def generic_check_issubclass(
        return issubclass(cls, class_or_tuple)
    except TypeError:
        origin = get_origin(cls)
        if is_union(origin):
        if origin_is_union(origin):
            return all(
                is_none_type(type_) or generic_check_issubclass(type_, class_or_tuple)
                for type_ in get_args(cls)
            )
        elif origin_is_literal(origin):
            return all(
                is_none_type(value) or isinstance(value, class_or_tuple)
                for value in all_literal_values(cls)
            )
        # ensure generic List, Dict can be checked
        elif origin:
            # avoid class check error (typing.Final, typing.ClassVar, etc...)
            try:
                return issubclass(origin, class_or_tuple)
            except TypeError:
                return False
        elif isinstance(cls, TypeVar):
            if cls.__constraints__:
                return all(
@@ -89,6 +139,21 @@ def generic_check_issubclass(
    return False
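Based on the branches shown above, a few illustrative checks (assuming List, Literal and Optional come from typing):

from typing import List, Literal, Optional

assert generic_check_issubclass(Optional[int], int)      # Union members may be None
assert generic_check_issubclass(Literal[1, 2], int)      # Literal values checked as instances
assert generic_check_issubclass(List[int], list)         # generic origin falls back to issubclass
assert not generic_check_issubclass(Literal["a"], int)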
def type_is_complex(type_: Type[Any]) -> bool:
    """Check whether type_ is a complex type"""
    origin = get_origin(type_)
    return _type_is_complex_inner(type_) or _type_is_complex_inner(origin)


def _type_is_complex_inner(type_: Optional[Type[Any]]) -> bool:
    if lenient_issubclass(type_, (str, bytes)):
        return False

    return lenient_issubclass(
        type_, (BaseModel, Mapping, Sequence, tuple, set, frozenset, deque)
    ) or dataclasses.is_dataclass(type_)


def is_coroutine_callable(call: Callable[..., Any]) -> bool:
    """Check whether call is a callable coroutine function"""
    if inspect.isroutine(call):
@@ -153,8 +218,7 @@ async def run_coro_with_catch(
    coro: Coroutine[Any, Any, T],
    exc: Tuple[Type[Exception], ...],
    return_on_err: None = None,
) -> Union[T, None]:
    ...
) -> Union[T, None]: ...


@overload
@@ -162,8 +226,7 @@ async def run_coro_with_catch(
    coro: Coroutine[Any, Any, T],
    exc: Tuple[Type[Exception], ...],
    return_on_err: R,
) -> Union[T, R]:
    ...
) -> Union[T, R]: ...


async def run_coro_with_catch(
@@ -220,6 +283,16 @@ def resolve_dot_notation(
    return instance


class classproperty(Generic[T]):
    """Class property decorator"""

    def __init__(self, func: Callable[[Any], T]) -> None:
        self.func = func

    def __get__(self, instance: Any, owner: Optional[Type[Any]] = None) -> T:
        return self.func(type(instance) if owner is None else owner)
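A minimal sketch of using the classproperty helper defined above (the class and attribute names are invented for illustration):

class AppInfo:
    _name = "nonebot"

    @classproperty
    def name(cls) -> str:
        # __get__ passes the owning class, so this works on the class and on instances.
        return cls._name


assert AppInfo.name == "nonebot"
assert AppInfo().name == "nonebot"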
class DataclassEncoder(json.JSONEncoder):
    """A `JSONEncoder` that can serialize {ref}`nonebot.adapters.Message` (List[Dataclass])"""
package.json (23 changes)
@@ -12,11 +12,30 @@
    "serve": "yarn workspace nonebot serve",
    "clear": "yarn workspace nonebot clear",
    "prettier": "prettier --config ./.prettierrc --write \"./website/\"",
    "lint": "yarn lint:js && yarn lint:style",
    "lint:js": "eslint --cache --report-unused-disable-directives \"**/*.{js,jsx,ts,tsx,mjs}\"",
    "lint:js:fix": "eslint --cache --report-unused-disable-directives --fix \"**/*.{js,jsx,ts,tsx,mjs}\"",
    "lint:style": "stylelint \"**/*.css\"",
    "lint:style:fix": "stylelint --fix \"**/*.css\"",
    "pyright": "pyright"
  },
  "devDependencies": {
    "@typescript-eslint/eslint-plugin": "^6.6.0",
    "@typescript-eslint/parser": "^6.6.0",
    "cross-env": "^7.0.3",
    "prettier": "^2.5.0",
    "pyright": "^1.1.317"
    "eslint": "^8.48.0",
    "eslint-config-prettier": "^9.0.0",
    "eslint-import-resolver-typescript": "^3.6.0",
    "eslint-plugin-import": "^2.28.1",
    "eslint-plugin-jsx-a11y": "^6.7.1",
    "eslint-plugin-prettier": "^5.0.0",
    "eslint-plugin-react": "^7.33.2",
    "eslint-plugin-react-hooks": "^4.6.0",
    "eslint-plugin-regexp": "^1.15.0",
    "prettier": "^3.0.3",
    "pyright": "^1.1.317",
    "stylelint": "^15.10.3",
    "stylelint-config-standard": "^34.0.0",
    "stylelint-prettier": "^4.0.2"
  }
}
poetry.lock (generated, 2605 changes)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
[tool.poetry]
name = "nonebot2"
version = "2.0.1"
version = "2.2.1"
description = "An asynchronous python bot framework."
authors = ["yanyongyu <yyy@nonebot.dev>"]
license = "MIT"
@@ -14,11 +14,9 @@ classifiers = [
    "Framework :: Robot Framework",
    "Framework :: Robot Framework :: Library",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3"
]
packages = [
    { include = "nonebot" },
    "Programming Language :: Python :: 3",
]
packages = [{ include = "nonebot" }]
include = ["nonebot/py.typed"]

[tool.poetry.urls]
@@ -31,31 +29,29 @@ python = "^3.8"
yarl = "^1.7.2"
pygtrie = "^2.4.1"
loguru = ">=0.6.0,<1.0.0"
typing-extensions = ">=4.0.0,<5.0.0"
python-dotenv = ">=0.21.0,<2.0.0"
typing-extensions = ">=4.4.0,<5.0.0"
pydantic = ">=1.10.0,<3.0.0,!=2.5.0,!=2.5.1"
tomli = { version = "^2.0.1", python = "<3.11" }
pydantic = { version = "^1.10.0", extras = ["dotenv"] }

websockets = { version = ">=10.0", optional = true }
Quart = { version = ">=0.18.0,<1.0.0", optional = true }
fastapi = { version = ">=0.93.0,<1.0.0", optional = true }
aiohttp = { version = "^3.7.4", extras = ["speedups"], optional = true }
aiohttp = { version = "^3.9.0b0", extras = ["speedups"], optional = true }
httpx = { version = ">=0.20.0,<1.0.0", extras = ["http2"], optional = true }
uvicorn = { version = ">=0.20.0,<1.0.0", extras = ["standard"], optional = true }
uvicorn = { version = ">=0.20.0,<1.0.0", extras = [
    "standard",
], optional = true }

[tool.poetry.group.dev.dependencies]
ruff = "^0.2.0"
isort = "^5.10.1"
black = "^23.1.0"
black = "^24.0.0"
nonemoji = "^0.1.2"
pre-commit = "^3.0.0"
ruff = ">=0.0.272,<1.0.0"

[tool.poetry.group.test.dependencies]
nonebug = "^0.3.0"
werkzeug = "^2.3.6"
pytest-cov = "^4.0.0"
pytest-xdist = "^3.0.2"
pytest-asyncio = "^0.21.0"
coverage-conditional-plugin = "^0.9.0"
nonebot-test = { path = "./envs/test/", develop = false }

[tool.poetry.group.docs.dependencies]
nb-autodoc = "^1.0.0a5"
@@ -71,10 +67,7 @@ all = ["fastapi", "quart", "aiohttp", "httpx", "websockets", "uvicorn"]
[tool.pytest.ini_options]
asyncio_mode = "strict"
addopts = "--cov=nonebot --cov-append --cov-report=term-missing"
filterwarnings = [
    "error",
    "ignore::DeprecationWarning",
]
filterwarnings = ["error", "ignore::DeprecationWarning"]

[tool.black]
line-length = 88
@@ -93,27 +86,31 @@ src_paths = ["nonebot", "tests"]
extra_standard_library = ["typing_extensions"]

[tool.ruff]
select = ["E", "W", "F", "UP", "C", "T", "PYI", "PT", "Q"]
ignore = ["E402", "C901"]

line-length = 88
target-version = "py38"

[tool.ruff.flake8-pytest-style]
[tool.ruff.lint]
select = ["E", "W", "F", "UP", "C", "T", "PYI", "PT", "Q"]
ignore = ["E402", "C901", "UP037"]

[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
mark-parentheses = false

[tool.pyright]
pythonVersion = "3.8"
pythonPlatform = "All"
defineConstant = { PYDANTIC_V2 = true }
executionEnvironments = [
    { root = "./tests", extraPaths = ["./"] },
    { root = "./tests", extraPaths = [
        "./",
    ] },
    { root = "./" },
]

typeCheckingMode = "basic"
reportShadowedImports = false

disableBytesTypePromotions = true

[build-system]
requires = ["poetry_core>=1.0.0"]
scripts/build-api-docs.sh (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#! /usr/bin/env bash

# cd to the root of the project
cd "$(dirname "$0")/.."

poetry run nb-autodoc nonebot \
  -s nonebot.plugins \
  -u nonebot.internal \
  -u nonebot.internal.*
cp -r ./build/nonebot/* ./website/docs/api/
yarn prettier
scripts/run-tests.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
#! /usr/bin/env bash

# cd to the root of the tests
cd "$(dirname "$0")/../tests"

# Run the tests
pytest -n auto --cov-report xml $@
scripts/setup-envs.sh (new executable file, 14 lines)
@@ -0,0 +1,14 @@
#! /usr/bin/env bash

# config poetry to install env in project
poetry config virtualenvs.in-project true

# setup dev environment
echo "Setting up dev environment"
poetry install --all-extras && poetry run pre-commit install && yarn install

# setup pydantic v2 test environment
for env in $(find ./envs/ -maxdepth 1 -mindepth 1 -type d -not -name test); do
  echo "Setting up $env environment"
  (cd $env && poetry install --no-root)
done
scripts/update-envs.sh (new executable file, 15 lines)
@@ -0,0 +1,15 @@
#! /usr/bin/env bash

# update test env
echo "Updating test env..."
(cd ./envs/test/ && poetry update --lock)

# update dev env
echo "Updating dev env..."
poetry update

# update other envs
for env in $(find ./envs/ -maxdepth 1 -mindepth 1 -type d -not -name test); do
  echo "Updating $env env..."
  (cd $env && poetry update)
done
@@ -11,7 +11,8 @@ exclude_lines =
    if (typing\.)?TYPE_CHECKING( is True)?:
    @(abc\.)?abstractmethod
    raise NotImplementedError
    \.\.\.
    warnings\.warn
    ^\.\.\.$
    pass
    if __name__ == .__main__.:

@@ -21,5 +22,8 @@ rules =
    "sys_platform != 'linux'": py-linux
    "sys_platform != 'darwin'": py-darwin
    "sys_version_info < (3, 9)": py-gte-39
    "sys_version_info >= (3, 9)": py-lt-39
    "sys_version_info < (3, 11)": py-gte-311
    "sys_version_info >= (3, 11)": py-lt-311
    "package_version('pydantic') < (2,)": pydantic-v2
    "package_version('pydantic') >= (2,)": pydantic-v1
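If I read the coverage-conditional-plugin rules above correctly, each key is a runtime condition and each value a pragma name whose tagged lines are excluded from coverage while that condition holds. A hypothetical sketch of code using two of those pragma names:

import sys


def toml_module_name() -> str:
    if sys.version_info >= (3, 11):
        return "tomllib"  # pragma: py-gte-311  (ignored by coverage on Python < 3.11)
    return "tomli"  # pragma: py-lt-311  (ignored by coverage on Python >= 3.11)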
tests/.env.example (new file, 17 lines)
@@ -0,0 +1,17 @@
SIMPLE=simple
COMPLEX='
[1, 2, 3]
'
COMPLEX_NONE
COMPLEX_UNION=[1, 2, 3]
NESTED={"a": 1}
NESTED__B=2
NESTED__C__C=3
NESTED__COMPLEX=[1, 2, 3]
NESTED_INNER__A=1
NESTED_INNER__B=2
OTHER_SIMPLE=simple
OTHER_NESTED={"a": 1}
OTHER_NESTED__B=2
OTHER_NESTED_INNER__A=1
OTHER_NESTED_INNER__B=2
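As a rough reading of the double-underscore convention exercised above (a sketch of the expected shape, not a reproduction of NoneBot's actual parser), the NESTED* keys should assemble into one nested mapping before validation:

# Hypothetical result of assembling NESTED, NESTED__B, NESTED__C__C and NESTED__COMPLEX:
expected_nested = {"a": 1, "b": 2, "c": {"c": 3}, "complex": [1, 2, 3]}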
@@ -13,3 +13,4 @@ NESTED_MISSING_DICT__A=1
NESTED_MISSING_DICT__B__C=2
NOT_NESTED=some string
NOT_NESTED__A=1
PLUGIN_CONFIG=1
@@ -8,8 +8,10 @@ from nonebug import NONEBOT_INIT_KWARGS
from werkzeug.serving import BaseWSGIServer, make_server

import nonebot
from nonebot.drivers import URL
from nonebot.config import Env
from fake_server import request_handler
from nonebot.drivers import URL, Driver
from nonebot import _resolve_combine_expr

os.environ["CONFIG_FROM_ENV"] = '{"test": "test"}'
os.environ["CONFIG_OVERRIDE"] = "new"
@@ -17,11 +19,24 @@ os.environ["CONFIG_OVERRIDE"] = "new"
if TYPE_CHECKING:
    from nonebot.plugin import Plugin

collect_ignore = ["plugins/", "dynamic/", "bad_plugins/"]


def pytest_configure(config: pytest.Config) -> None:
    config.stash[NONEBOT_INIT_KWARGS] = {"config_from_init": "init"}


@pytest.fixture(name="driver")
def load_driver(request: pytest.FixtureRequest) -> Driver:
    driver_name = getattr(request, "param", None)
    global_driver = nonebot.get_driver()
    if driver_name is None:
        return global_driver

    DriverClass = _resolve_combine_expr(driver_name)
    return DriverClass(Env(environment=global_driver.env), global_driver.config)


@pytest.fixture(scope="session", autouse=True)
def load_plugin(nonebug_init: None) -> Set["Plugin"]:
    # preload global plugins
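The driver fixture above only resolves a dotted driver path when a test parametrizes it indirectly; a minimal sketch of how a test opts in (the test name is invented, the driver path mirrors those used elsewhere in this diff):

import pytest

from nonebot.drivers import Driver


@pytest.mark.asyncio
@pytest.mark.parametrize("driver", ["nonebot.drivers.fastapi:Driver"], indirect=True)
async def test_with_fastapi_driver(driver: Driver):
    # The fixture instantiates the requested driver class instead of the global one.
    assert isinstance(driver, Driver)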
@@ -1,9 +1,15 @@
import json
import base64
import socket
from typing import Dict, List, Union, TypeVar

from wsproto.events import Ping
from werkzeug import Request, Response
from werkzeug.datastructures import MultiDict
from wsproto.frame_protocol import CloseReason
from wsproto.events import Request as WSRequest
from wsproto import WSConnection, ConnectionType
from wsproto.events import TextMessage, BytesMessage, CloseConnection, AcceptConnection

K = TypeVar("K")
V = TypeVar("V")
@@ -29,8 +35,7 @@ def flattern(d: "MultiDict[K, V]") -> Dict[K, Union[V, List[V]]]:
    return {k: v[0] if len(v) == 1 else v for k, v in d.to_dict(flat=False).items()}


@Request.application
def request_handler(request: Request) -> Response:
def http_echo(request: Request) -> Response:
    try:
        _json = json.loads(request.data.decode("utf-8"))
    except (ValueError, TypeError):
@@ -67,3 +72,65 @@ def request_handler(request: Request) -> Response:
        status=200,
        content_type="application/json",
    )


def websocket_echo(request: Request) -> Response:
    stream = request.environ["werkzeug.socket"]

    ws = WSConnection(ConnectionType.SERVER)

    in_data = b"GET %s HTTP/1.1\r\n" % request.path.encode("utf-8")
    for header, value in request.headers.items():
        in_data += f"{header}: {value}\r\n".encode()
    in_data += b"\r\n"

    ws.receive_data(in_data)

    running: bool = True
    while True:
        out_data = b""

        for event in ws.events():
            if isinstance(event, WSRequest):
                out_data += ws.send(AcceptConnection())
            elif isinstance(event, CloseConnection):
                out_data += ws.send(event.response())
                running = False
            elif isinstance(event, Ping):
                out_data += ws.send(event.response())
            elif isinstance(event, TextMessage):
                if event.data == "quit":
                    out_data += ws.send(
                        CloseConnection(CloseReason.NORMAL_CLOSURE, "bye")
                    )
                    running = False
                else:
                    out_data += ws.send(TextMessage(data=event.data))
            elif isinstance(event, BytesMessage):
                if event.data == b"quit":
                    out_data += ws.send(
                        CloseConnection(CloseReason.NORMAL_CLOSURE, "bye")
                    )
                    running = False
                else:
                    out_data += ws.send(BytesMessage(data=event.data))

        if out_data:
            stream.send(out_data)

        if not running:
            break

        in_data = stream.recv(4096)
        ws.receive_data(in_data)

    stream.shutdown(socket.SHUT_RDWR)
    return Response("", status=204)


@Request.application
def request_handler(request: Request) -> Response:
    if request.headers.get("Connection") == "Upgrade":
        return websocket_echo(request)
    else:
        return http_echo(request)
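The dispatcher above is a plain WSGI application, so a minimal sketch of serving it during tests could look like this (make_server is the same werkzeug helper imported in conftest.py; running it on a background thread is an assumption about how the suite drives it):

import threading

from werkzeug.serving import make_server

from fake_server import request_handler

server = make_server("127.0.0.1", 0, request_handler)  # port 0 picks a free port
threading.Thread(target=server.serve_forever, daemon=True).start()
# ... point HTTP and WebSocket echo requests at server.server_port ...
server.shutdown()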
tests/plugins/matcher/matcher_info.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from nonebot import on

matcher = on("message", temp=False, expire_time=None, priority=1, block=True)
@@ -78,8 +78,7 @@ async def reject_preset(a: str = ArgStr(), b: str = ArgStr()):
test_overload = on_message()


class FakeEvent(Event):
    ...
class FakeEvent(Event): ...


@test_overload.got("a")
@@ -8,8 +8,7 @@ class Config(BaseModel):
    custom: str = ""


class FakeAdapter(Adapter):
    ...
class FakeAdapter(Adapter): ...


__plugin_meta__ = PluginMetadata(
@@ -26,3 +26,14 @@ async def annotated_arg_str(key: Annotated[str, ArgStr()]) -> str:

async def annotated_arg_plain_text(key: Annotated[str, ArgPlainText()]) -> str:
    return key


# test dependency priority
async def annotated_prior_arg(key: Annotated[str, ArgStr("foo")] = ArgPlainText()):
    return key


async def annotated_multi_arg(
    key: Annotated[Annotated[str, ArgStr("foo")], ArgPlainText()]
):
    return key
@@ -11,20 +11,17 @@ async def legacy_bot(bot):
    return bot


async def not_legacy_bot(bot: int):
    ...
async def not_legacy_bot(bot: int): ...


class FooBot(Bot):
    ...
class FooBot(Bot): ...


async def sub_bot(b: FooBot) -> FooBot:
    return b


class BarBot(Bot):
    ...
class BarBot(Bot): ...


async def union_bot(b: Union[FooBot, BarBot]) -> Union[FooBot, BarBot]:
@@ -45,5 +42,4 @@ async def generic_bot_none(b: CB) -> CB:
    return b


async def not_bot(b: Union[int, Bot]):
    ...
async def not_bot(b: Union[int, Bot]): ...
@@ -1,7 +1,10 @@
from dataclasses import dataclass
from typing_extensions import Annotated

from pydantic import Field

from nonebot import on_message
from nonebot.adapters import Bot
from nonebot.params import Depends

test_depends = on_message()
@@ -33,6 +36,13 @@ class ClassDependency:
    y: int = Depends(gen_async)


class FooBot(Bot): ...


async def sub_bot(b: FooBot) -> FooBot:
    return b


# test parameterless
@test_depends.handle(parameterless=[Depends(parameterless)])
async def depends(x: int = Depends(dependency)):
@@ -46,19 +56,52 @@ async def depends_cache(y: int = Depends(dependency, use_cache=True)):
    return y


# test class dependency
async def class_depend(c: ClassDependency = Depends()):
    return c


# test annotated dependency
async def annotated_depend(x: Annotated[int, Depends(dependency)]):
    return x


# test annotated class dependency
async def annotated_class_depend(c: Annotated[ClassDependency, Depends()]):
    return c


# test dependency priority
async def annotated_prior_depend(
    x: Annotated[int, Depends(lambda: 2)] = Depends(dependency)
):
    return x


async def annotated_multi_depend(
    x: Annotated[Annotated[int, Depends(lambda: 2)], Depends(dependency)]
):
    return x


# test sub dependency type mismatch
async def sub_type_mismatch(b: FooBot = Depends(sub_bot)):
    return b


# test type validate
async def validate(x: int = Depends(lambda: "1", validate=True)):
    return x


async def validate_fail(x: int = Depends(lambda: "not_number", validate=True)):
    return x


# test FieldInfo validate
async def validate_field(x: int = Depends(lambda: "1", validate=Field(gt=0))):
    return x


async def validate_field_fail(x: int = Depends(lambda: "0", validate=Field(gt=0))):
    return x
@@ -12,20 +12,17 @@ async def legacy_event(event):
    return event


async def not_legacy_event(event: int):
    ...
async def not_legacy_event(event: int): ...


class FooEvent(Event):
    ...
class FooEvent(Event): ...


async def sub_event(e: FooEvent) -> FooEvent:
    return e


class BarEvent(Event):
    ...
class BarEvent(Event): ...


async def union_event(e: Union[FooEvent, BarEvent]) -> Union[FooEvent, BarEvent]:
@@ -46,8 +43,7 @@ async def generic_event_none(e: CE) -> CE:
    return e


async def not_event(e: Union[int, Event]):
    ...
async def not_event(e: Union[int, Event]): ...


async def event_type(t: str = EventType()) -> str:
@@ -4,3 +4,7 @@ from typing import Union
async def exc(e: Exception, x: Union[ValueError, TypeError]) -> Exception:
    assert e == x
    return e


async def legacy_exc(exception) -> Exception:
    return exception
@@ -13,20 +13,17 @@ async def legacy_matcher(matcher):
    return matcher


async def not_legacy_matcher(matcher: int):
    ...
async def not_legacy_matcher(matcher: int): ...


class FooMatcher(Matcher):
    ...
class FooMatcher(Matcher): ...


async def sub_matcher(m: FooMatcher) -> FooMatcher:
    return m


class BarMatcher(Matcher):
    ...
class BarMatcher(Matcher): ...


async def union_matcher(
@@ -49,8 +46,7 @@ async def generic_matcher_none(m: CM) -> CM:
    return m


async def not_matcher(m: Union[int, Matcher]):
    ...
async def not_matcher(m: Union[int, Matcher]): ...


async def receive(e: Event = Received("test")) -> Event:
@@ -29,8 +29,7 @@ async def legacy_state(state):
    return state


async def not_legacy_state(state: int):
    ...
async def not_legacy_state(state: int): ...


async def command(cmd: Tuple[str, ...] = Command()) -> Tuple[str, ...]:
@@ -77,8 +76,13 @@ async def regex_matched(regex_matched: Match[str] = RegexMatched()) -> Match[str
    return regex_matched


async def regex_str(regex_str: str = RegexStr()) -> str:
    return regex_str
async def regex_str(
    entire: str = RegexStr(),
    type_: str = RegexStr("type"),
    second: str = RegexStr(2),
    groups: Tuple[str, ...] = RegexStr(1, "arg"),
) -> Tuple[str, str, str, Tuple[str, ...]]:
    return entire, type_, second, groups


async def startswith(startswith: str = Startswith()) -> str:
@@ -19,5 +19,4 @@ async def complex_priority(
    arg: Message = Arg(),
    exception: Optional[Exception] = None,
    default: int = 1,
):
    ...
): ...
@@ -202,8 +202,7 @@ matcher_on_regex = on_regex(
)


class TestEvent(Event):
    ...
class TestEvent(Event): ...


matcher_on_type = on_type(
tests/test_adapters/test_adapter.py (new file, 210 lines)
@@ -0,0 +1,210 @@
from typing import Optional
from contextlib import asynccontextmanager

import pytest
from nonebug import App

from utils import FakeAdapter
from nonebot.adapters import Bot
from nonebot.drivers import (
    URL,
    Driver,
    Request,
    Response,
    WebSocket,
    HTTPServerSetup,
    WebSocketServerSetup,
)


@pytest.mark.asyncio
async def test_adapter_connect(app: App, driver: Driver):
    last_connect_bot: Optional[Bot] = None
    last_disconnect_bot: Optional[Bot] = None

    def _fake_bot_connect(bot: Bot):
        nonlocal last_connect_bot
        last_connect_bot = bot

    def _fake_bot_disconnect(bot: Bot):
        nonlocal last_disconnect_bot
        last_disconnect_bot = bot

    with pytest.MonkeyPatch.context() as m:
        m.setattr(driver, "_bot_connect", _fake_bot_connect)
        m.setattr(driver, "_bot_disconnect", _fake_bot_disconnect)

        adapter = FakeAdapter(driver)

        async with app.test_api() as ctx:
            bot = ctx.create_bot(adapter=adapter)
            assert last_connect_bot is bot
            assert adapter.bots[bot.self_id] is bot

        assert last_disconnect_bot is bot
        assert bot.self_id not in adapter.bots


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "driver",
    [
        pytest.param("nonebot.drivers.fastapi:Driver", id="fastapi"),
        pytest.param("nonebot.drivers.quart:Driver", id="quart"),
        pytest.param(
            "nonebot.drivers.httpx:Driver",
            id="httpx",
            marks=pytest.mark.xfail(
                reason="not a server", raises=TypeError, strict=True
            ),
        ),
        pytest.param(
            "nonebot.drivers.websockets:Driver",
            id="websockets",
            marks=pytest.mark.xfail(
                reason="not a server", raises=TypeError, strict=True
            ),
        ),
        pytest.param(
            "nonebot.drivers.aiohttp:Driver",
            id="aiohttp",
            marks=pytest.mark.xfail(
                reason="not a server", raises=TypeError, strict=True
            ),
        ),
    ],
    indirect=True,
)
async def test_adapter_server(driver: Driver):
    last_http_setup: Optional[HTTPServerSetup] = None
    last_ws_setup: Optional[WebSocketServerSetup] = None

    def _fake_setup_http_server(setup: HTTPServerSetup):
        nonlocal last_http_setup
        last_http_setup = setup

    def _fake_setup_websocket_server(setup: WebSocketServerSetup):
        nonlocal last_ws_setup
        last_ws_setup = setup

    with pytest.MonkeyPatch.context() as m:
        m.setattr(driver, "setup_http_server", _fake_setup_http_server, raising=False)
        m.setattr(
            driver,
            "setup_websocket_server",
            _fake_setup_websocket_server,
            raising=False,
        )

        async def handle_http(request: Request):
            return Response(200, content="test")

        async def handle_ws(ws: WebSocket): ...

        adapter = FakeAdapter(driver)

        setup = HTTPServerSetup(URL("/test"), "GET", "test", handle_http)
        adapter.setup_http_server(setup)
        assert last_http_setup is setup

        setup = WebSocketServerSetup(URL("/test"), "test", handle_ws)
        adapter.setup_websocket_server(setup)
        assert last_ws_setup is setup


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "driver",
    [
        pytest.param(
            "nonebot.drivers.fastapi:Driver",
            id="fastapi",
            marks=pytest.mark.xfail(
                reason="not a http client", raises=TypeError, strict=True
            ),
        ),
        pytest.param(
            "nonebot.drivers.quart:Driver",
            id="quart",
            marks=pytest.mark.xfail(
                reason="not a http client", raises=TypeError, strict=True
            ),
        ),
        pytest.param("nonebot.drivers.httpx:Driver", id="httpx"),
        pytest.param(
            "nonebot.drivers.websockets:Driver",
            id="websockets",
            marks=pytest.mark.xfail(
                reason="not a http client", raises=TypeError, strict=True
            ),
        ),
        pytest.param("nonebot.drivers.aiohttp:Driver", id="aiohttp"),
    ],
    indirect=True,
)
async def test_adapter_http_client(driver: Driver):
    last_request: Optional[Request] = None

    async def _fake_request(request: Request):
        nonlocal last_request
        last_request = request

    with pytest.MonkeyPatch.context() as m:
        m.setattr(driver, "request", _fake_request, raising=False)

        adapter = FakeAdapter(driver)

        request = Request("GET", URL("/test"))
        await adapter.request(request)
        assert last_request is request


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "driver",
    [
        pytest.param(
            "nonebot.drivers.fastapi:Driver",
            id="fastapi",
            marks=pytest.mark.xfail(
                reason="not a websocket client", raises=TypeError, strict=True
            ),
        ),
        pytest.param(
            "nonebot.drivers.quart:Driver",
            id="quart",
            marks=pytest.mark.xfail(
                reason="not a websocket client", raises=TypeError, strict=True
            ),
        ),
        pytest.param(
            "nonebot.drivers.httpx:Driver",
            id="httpx",
            marks=pytest.mark.xfail(
                reason="not a websocket client", raises=TypeError, strict=True
            ),
        ),
        pytest.param("nonebot.drivers.websockets:Driver", id="websockets"),
        pytest.param("nonebot.drivers.aiohttp:Driver", id="aiohttp"),
    ],
    indirect=True,
)
async def test_adapter_websocket_client(driver: Driver):
    _fake_ws = object()
    _last_request: Optional[Request] = None

    @asynccontextmanager
    async def _fake_websocket(setup: Request):
        nonlocal _last_request
        _last_request = setup
        yield _fake_ws

    with pytest.MonkeyPatch.context() as m:
        m.setattr(driver, "websocket", _fake_websocket, raising=False)

        adapter = FakeAdapter(driver)

        request = Request("GET", URL("/test"))
        async with adapter.websocket(request) as ws:
            assert _last_request is request
            assert ws is _fake_ws