mirror of https://github.com/nonebot/nonebot2.git
synced 2025-10-06 18:56:45 +00:00
Compare commits
250 Commits
35cee22cf6 fbb55228f2 391ac00d81 277b744ca3 a89c67a50e 26b30a7b22 4dae23d3bb 07e6c3f977
dace63d9d2 2ebf956599 b20793c67a 47e9f59cc8 e27cac7fef 5bfda6e2bc ef2ab7df48 ac1d9147d2
f2350909d2 f14ef93808 45bd4252bf 6b4456bf0e c5e114dc7f 30ceea4287 380f9ff013 19ac119714
236f70183c 117bc35653 4fcaa8d3d6 536889d3df bbd13c04cc 82e4ccb227 626cfa474f 18e9a9afd3
41b7d5a3a0 16fcd4c639 ef3641efa6 8d95a32672 3a3a718779 3d1955211a 8d87715d6f 3c535b8e99
2c6affecea c2d2169a9f 1153c5ff17 6c532f5926 7083394bc9 7c58410868 00c3e3b713 9d4a72766d
82e16b4438 56353f2d0a 4d0eb94a6f e1a494ecbd 6b1e34da63 ccf9597102 5a6f4b9e1c 9b09b42f97
854345e16f e0ee865b87 dad0c01335 79ef5af19b b349959f93 2e7f9612af 8ff2303b22 b681fdd6d6
b65b3b438c 580d6bab36 90349ddd7d dcac421bc0 b4f643577f 411e7168b3 fef072a62a f529e9cb23
cfa3bfd88c 321c99f12b 73ad4992ee ddbf37c1be b9392371c7 d3c26a1548 31c2a61cce f84ba9768b
1faa935527 5f940ff309 4c4c0ea0ba 787b40a99e fd6a0ae747 298a32c096 aecff5ffd6 c1a6b7b787
0903f19f9c 51aa23817a 8f3f385cb6 915274081d a388c52b3f b4d3cd4d4d 50c03b0675 fa3bb96417
09bde57835 76ac2a8843 f6ec6962ab 28ad6829cd 7f4b002a87 7e073b6ff4 fa3781efe5 bec74d85cd
abc3829c64 18f5d6eab9 00f3e30930 97cd21d004 09b4d44f23 3536bf56bd f8eaf5def0 6077f85e52
e2976a3859 1e25fde22e 55d88b7dae de30f8917f 52653fa005 4628358add 117b08a73e 700888a8e0
ef882927f3 af9327de14 2881d42bf5 dc3a49fe57 addabd6396 3341c641cc 363413e1e6 b675d27a30
796023408a 983a8512b2 6593102632 65fff13150 edd1a140d7 18070baad4 acf729f6e7 6dbc8eac03
35944bcbdc 3f919f91c1 443a20d83d 2fca26eaae ebc8141971 5d6bcc9b9b 55fca332ba 6b65c5fe69
3e4dbe1015 20197e64b2 94eecaf448 fa91e0e79b 891adc38fc af6cc63db2 af73e14b64 9305fe7875
613fde4639 61db2c898b acf313c420 15fca08641 e2cbe3c1f8 d3883ea3ae 8b2c4b3e60 65d0d00591
97a57c2f6e 6559b2ff27 4c1deeb899 a65ea6805d effe65b034 37296cf048 1b597c1301 c2454d0689
9b60b44554 75516bdafb 12f5a487c1 8d128d5035 cfa7117e64 7880bf0dc1 0054041829 99931f785a
5e121269f0 38ced0243f 869db878e1 e6c6e355e1 6221b9a5fd 5f2c9c935b 76559b253c 3c54655c39
7a851ac199 b2ba5dfcd1 4a4fae8f8c de894ce7b2 09c4a955c9 db1581a0a2 db9d7b3060 7e0c29472e
d13492070d 695ede51ea 168f382aa6 5bd433318d d1cd2a793e 5a4464f338 561d25320b b225c2dd3b
2a2e357513 28bfe1ecb8 cc12f0af7e da831a1b08 eb97be17dd 2dd1c9b2ad 41191db863 ee20204b22
f1032804bb ba1540d75b f5c87f80e1 d2d7603ff5 56013dca48 d33ed4a69f ed753b5564 7e65552d01
f77dc523e6 0d84bf3592 94dff49e60 5d4cf7e421 0e3e16e809 183fc8defb 8712e89322 e2b49f9b65
7e11f3a3d6 71bebb6ec7 842c6ff4c6 7754f6da1d 60e0752f1a ede1a20c53 04289fd50f ba3efa9e7c
c5a66a6ed0 8a23b1554a d73f226cbd fd9ba678ec d29ba62ff9 00c97fd18f 9531c3fa74 94293122e8
7aaa66c8ba 0030bf725e 22b6062900 005968ab70 dc6c194701 9b8772b590 ae8ba9f55d f4a7ce2c09
c84723668f bd3ed4207a
@@ -4,7 +4,7 @@
   "features": {
     "ghcr.io/devcontainers-contrib/features/poetry:2": {}
   },
-  "postCreateCommand": "poetry config virtualenvs.in-project true && poetry install -E all && poetry run pre-commit install && yarn install",
+  "postCreateCommand": "./scripts/setup-envs.sh",
   "customizations": {
     "vscode": {
       "settings": {
.eslintignore (new file, 6 lines)
@@ -0,0 +1,6 @@
dist
node_modules
.yarn
.history
build
lib
.eslintrc.js (new file, 85 lines)
@@ -0,0 +1,85 @@
module.exports = {
  root: true,
  env: {
    browser: true,
    commonjs: true,
    node: true,
  },
  parser: "@typescript-eslint/parser",
  parserOptions: {
    tsconfigRootDir: __dirname,
    project: ["./tsconfig.json", "./website/tsconfig.json"],
  },
  globals: {
    JSX: true,
  },
  extends: [
    "eslint:recommended",
    "plugin:react/recommended",
    "plugin:react-hooks/recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:import/recommended",
    "plugin:regexp/recommended",
    "plugin:prettier/recommended",
  ],
  settings: {
    "import/resolver": {
      node: {
        extensions: [".js", ".jsx", ".ts", ".tsx"],
      },
      typescript: true,
    },
    react: {
      version: "detect",
    },
  },
  overrides: [
    {
      files: ["*.ts", "*.tsx"],
      rules: {
        "import/no-unresolved": "off",
      },
    },
    {
      files: ["*.js", "*.cjs"],
      rules: {
        "@typescript-eslint/no-var-requires": "off",
      },
    },
  ],
  plugins: ["@typescript-eslint"],
  rules: {
    "linebreak-style": ["error", "unix"],
    quotes: ["error", "double", { avoidEscape: true }],
    semi: ["error", "always"],
    "@typescript-eslint/no-non-null-assertion": "off",
    "import/order": [
      "error",
      {
        groups: [
          "builtin",
          "external",
          "internal",
          "parent",
          "sibling",
          "index",
        ],
        pathGroups: [
          { pattern: "react", group: "builtin", position: "before" },
          { pattern: "fs-extra", group: "builtin" },
          { pattern: "lodash", group: "external", position: "before" },
          { pattern: "clsx", group: "external", position: "before" },
          { pattern: "@theme/**", group: "internal" },
          { pattern: "@site/**", group: "internal" },
          { pattern: "@theme-init/**", group: "internal" },
          { pattern: "@theme-original/**", group: "internal" },
        ],
        pathGroupsExcludedImportTypes: [],
        "newlines-between": "always",
        alphabetize: {
          order: "asc",
        },
      },
    ],
  },
};
.github/actions/setup-node/action.yml (vendored, 14 changed lines)
@@ -4,18 +4,10 @@ description: Setup Node
 runs:
   using: "composite"
   steps:
-    - uses: actions/setup-node@v3
+    - uses: actions/setup-node@v4
       with:
         node-version: "18"
+        cache: "yarn"

-    - id: yarn-cache-dir-path
-      run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
-      shell: bash
-
-    - uses: actions/cache@v3
-      with:
-        path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
-        key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
-
-    - run: yarn install
+    - run: yarn install --frozen-lockfile
       shell: bash
.github/actions/setup-python/action.yml (vendored, 21 changed lines)
@@ -6,6 +6,14 @@ inputs:
     description: Python version
     required: false
     default: "3.10"
+  env-dir:
+    description: Environment directory
+    required: false
+    default: "."
+  no-root:
+    description: Do not install package in the environment
+    required: false
+    default: "false"

 runs:
   using: "composite"
@@ -14,11 +22,20 @@
       run: pipx install poetry
       shell: bash

-    - uses: actions/setup-python@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: ${{ inputs.python-version }}
         architecture: "x64"
         cache: "poetry"
+        cache-dependency-path: |
+          ./poetry.lock
+          ${{ inputs.env-dir }}/poetry.lock

-    - run: poetry install -E all
+    - run: |
+        cd ${{ inputs.env-dir }}
+        if [ "${{ inputs.no-root }}" = "true" ]; then
+          poetry install --all-extras --no-root
+        else
+          poetry install --all-extras
+        fi
       shell: bash
.github/dependabot.yml (vendored, 16 changed lines)
@@ -4,18 +4,34 @@ updates:
     directory: "/"
     schedule:
       interval: daily
+    groups:
+      actions:
+        patterns:
+          - "*"

   - package-ecosystem: github-actions
     directory: "/.github/actions/build-api-doc"
     schedule:
       interval: daily
+    groups:
+      actions:
+        patterns:
+          - "*"

   - package-ecosystem: github-actions
     directory: "/.github/actions/setup-node"
     schedule:
       interval: daily
+    groups:
+      actions:
+        patterns:
+          - "*"

   - package-ecosystem: github-actions
     directory: "/.github/actions/setup-python"
     schedule:
       interval: daily
+    groups:
+      actions:
+        patterns:
+          - "*"
.github/workflows/codecov.yml (vendored, 23 changed lines)
@@ -6,25 +6,32 @@ on:
       - master
   pull_request:
     paths:
+      - "envs/**"
       - "nonebot/**"
       - "packages/**"
       - "tests/**"
+      - ".github/actions/setup-python/**"
+      - ".github/workflows/codecov.yml"
+      - "pyproject.toml"
+      - "poetry.lock"

 jobs:
   test:
     name: Test Coverage
     runs-on: ${{ matrix.os }}
     concurrency:
-      group: test-coverage-${{ github.ref }}-${{ matrix.os }}-${{ matrix.python-version }}
+      group: test-coverage-${{ github.ref }}-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.env }}
       cancel-in-progress: true
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
         os: [ubuntu-latest, windows-latest, macos-latest]
+        env: [pydantic-v1, pydantic-v2]
       fail-fast: false
     env:
       OS: ${{ matrix.os }}
       PYTHON_VERSION: ${{ matrix.python-version }}
+      PYDANTIC_VERSION: ${{ matrix.env }}

     steps:
       - uses: actions/checkout@v4
@@ -33,15 +40,19 @@ jobs:
         uses: ./.github/actions/setup-python
         with:
           python-version: ${{ matrix.python-version }}
+          env-dir: ./envs/${{ matrix.env }}
+          no-root: true

       - name: Run Pytest
         run: |
-          cd tests/
-          poetry run pytest -n auto --cov-report xml
+          cd ./envs/${{ matrix.env }}
+          poetry run bash "../../scripts/run-tests.sh"

       - name: Upload coverage report
-        uses: codecov/codecov-action@v3
+        uses: codecov/codecov-action@v4
         with:
-          env_vars: OS,PYTHON_VERSION
+          env_vars: OS,PYTHON_VERSION,PYDANTIC_VERSION
           files: ./tests/coverage.xml
           flags: unittests
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/noneflow.yml (vendored, 12 changed lines)
@@ -47,7 +47,7 @@ jobs:
         run: pipx install poetry

       - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"

@@ -62,7 +62,7 @@ jobs:
     steps:
       - name: Generate token
         id: generate-token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.APP_ID }}
           private_key: ${{ secrets.APP_KEY }}
@@ -73,7 +73,7 @@ jobs:
           token: ${{ steps.generate-token.outputs.token }}

       - name: Cache pre-commit hooks
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: .cache/.pre-commit
           key: noneflow-${{ runner.os }}-${{ hashFiles('.pre-commit-config.yaml') }}
@@ -84,9 +84,9 @@ jobs:
           config: >
             {
               "base": "master",
-              "plugin_path": "website/static/plugins.json",
-              "bot_path": "website/static/bots.json",
-              "adapter_path": "website/static/adapters.json"
+              "plugin_path": "assets/plugins.json",
+              "bot_path": "assets/bots.json",
+              "adapter_path": "assets/adapters.json"
             }
         env:
           PLUGIN_TEST_RESULT: ${{ needs.plugin_test.outputs.result }}
.github/workflows/pyright.yml (vendored, 25 changed lines)
@@ -6,21 +6,42 @@ on:
       - master
   pull_request:
     paths:
+      - "envs/**"
       - "nonebot/**"
       - "packages/**"
       - "tests/**"
+      - ".github/actions/setup-python/**"
+      - ".github/workflows/pyright.yml"
+      - "pyproject.toml"
+      - "poetry.lock"

 jobs:
   pyright:
     name: Pyright Lint
     runs-on: ubuntu-latest
+    concurrency:
+      group: pyright-${{ github.ref }}-${{ matrix.env }}
+      cancel-in-progress: true
+    strategy:
+      matrix:
+        env: [pydantic-v1, pydantic-v2]
+      fail-fast: false

     steps:
       - uses: actions/checkout@v4

       - name: Setup Python environment
         uses: ./.github/actions/setup-python
+        with:
+          env-dir: ./envs/${{ matrix.env }}
+          no-root: true

-      - run: echo "$(poetry env info --path)/bin" >> $GITHUB_PATH
+      - run: |
+          (cd ./envs/${{ matrix.env }} && echo "$(poetry env info --path)/bin" >> $GITHUB_PATH)
+          if [ "${{ matrix.env }}" = "pydantic-v1" ]; then
+            sed -i 's/PYDANTIC_V2 = true/PYDANTIC_V2 = false/g' ./pyproject.toml
+          fi
+        shell: bash

       - name: Run Pyright
-        uses: jakebailey/pyright-action@v1
+        uses: jakebailey/pyright-action@v2
.github/workflows/release-drafter.yml (vendored, 66 changed lines)
@@ -20,7 +20,7 @@ jobs:
     steps:
       - name: Generate token
         id: generate-token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.APP_ID }}
           private_key: ${{ secrets.APP_KEY }}
@@ -32,10 +32,10 @@ jobs:
       - name: Setup Node Environment
         uses: ./.github/actions/setup-node

-      - uses: release-drafter/release-drafter@v5
+      - uses: release-drafter/release-drafter@v6
         id: release-drafter
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}

       - name: Update Changelog
         uses: docker://ghcr.io/nonebot/auto-changelog:master
@@ -59,7 +59,17 @@
   release:
     if: startsWith(github.ref, 'refs/tags/')
     runs-on: ubuntu-latest
+    permissions:
+      id-token: write
+      contents: write
     steps:
+      - name: Generate token
+        id: generate-token
+        uses: tibdex/github-app-token@v2
+        with:
+          app_id: ${{ secrets.APP_ID }}
+          private_key: ${{ secrets.APP_KEY }}

       - uses: actions/checkout@v4

       - name: Setup Python Environment
@@ -71,33 +81,53 @@
       - name: Build API Doc
         uses: ./.github/actions/build-api-doc

-      - run: |
-          echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
+      - name: Get Version
+        id: version
+        run: |
+          echo "VERSION=$(poetry version -s)" >> $GITHUB_OUTPUT
+          echo "TAG_VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
+          echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT

-      - uses: release-drafter/release-drafter@v5
+      - name: Check Version
+        if: steps.version.outputs.VERSION != steps.version.outputs.TAG_VERSION
+        run: exit 1
+
+      - uses: release-drafter/release-drafter@v6
         with:
-          name: Release ${{ env.TAG_NAME }} 🌈
-          tag: ${{ env.TAG_NAME }}
+          name: Release ${{ steps.version.outputs.TAG_NAME }} 🌈
+          tag: ${{ steps.version.outputs.TAG_NAME }}
           publish: true
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}

-      - name: Build and Publish Package
+      - name: Build Package
         run: |
           poetry build
-          poetry publish -u ${{secrets.PYPI_USERNAME}} -p ${{secrets.PYPI_PASSWORD}}
-          gh release upload --clobber ${{ env.TAG_NAME }} dist/*.tar.gz dist/*.whl
+
+      - name: Publish package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+
+      - name: Publish package to GitHub
+        run: |
+          gh release upload --clobber ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}

       - name: Build and Publish Doc Package
         run: |
           yarn build:plugin --out-dir ../packages/nonebot-plugin-docs/nonebot_plugin_docs/dist
-          export NONEBOT_VERSION=`poetry version -s`
           cd packages/nonebot-plugin-docs/
-          poetry version $NONEBOT_VERSION
+          poetry version ${{ steps.version.outputs.VERSION }}
           poetry build
-          poetry publish -u ${{secrets.PYPI_USERNAME}} -p ${{secrets.PYPI_PASSWORD}}
-          gh release upload --clobber ${{ env.TAG_NAME }} dist/*.tar.gz dist/*.whl
+
+      - name: Publish Doc Package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          packages-dir: packages/nonebot-plugin-docs/dist/
+
+      - name: Publish Doc Package to GitHub
+        run: |
+          cd packages/nonebot-plugin-docs/
+          gh release upload --clobber ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
.github/workflows/release.yml (vendored, 2 changed lines)
@@ -9,7 +9,7 @@ jobs:
     steps:
       - name: Generate token
         id: generate-token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
        with:
           app_id: ${{ secrets.APP_ID }}
           private_key: ${{ secrets.APP_KEY }}
.github/workflows/ruff.yml (vendored, 9 changed lines)
@@ -6,14 +6,23 @@ on:
       - master
   pull_request:
     paths:
+      - "envs/**"
       - "nonebot/**"
       - "packages/**"
       - "tests/**"
+      - ".github/actions/setup-python/**"
+      - ".github/workflows/ruff.yml"
+      - "pyproject.toml"
+      - "poetry.lock"

 jobs:
   ruff:
     name: Ruff Lint
     runs-on: ubuntu-latest
+    concurrency:
+      group: pyright-${{ github.ref }}
+      cancel-in-progress: true

     steps:
       - uses: actions/checkout@v4

.gitignore (vendored, 2 changed lines)
@@ -139,7 +139,7 @@ fabric.properties
 .LSOverride

 # Icon must end with two \r
-Icon
+# Icon

 # Thumbnails
 ._*
@@ -7,26 +7,26 @@ ci:
   autoupdate_commit_msg: ":arrow_up: auto update by pre-commit hooks"
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.287
+    rev: v0.2.0
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
         stages: [commit]

   - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort
         stages: [commit]

   - repo: https://github.com/psf/black
-    rev: 23.7.0
+    rev: 24.1.1
     hooks:
       - id: black
         stages: [commit]

   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v3.0.3
+    rev: v4.0.0-alpha.8
     hooks:
       - id: prettier
         types_or: [javascript, jsx, ts, tsx, markdown, yaml, json]
.prettierrc (14 changed lines)
@@ -5,5 +5,17 @@
   "arrowParens": "always",
   "singleQuote": false,
   "trailingComma": "es5",
-  "semi": true
+  "semi": true,
+  "overrides": [
+    {
+      "files": [
+        "**/devcontainer.json",
+        "**/tsconfig.json",
+        "**/tsconfig.*.json"
+      ],
+      "options": {
+        "parser": "json"
+      }
+    }
+  ]
 }
.stylelintrc.js (new file, 31 lines)
@@ -0,0 +1,31 @@
module.exports = {
  extends: ["stylelint-config-standard", "stylelint-prettier/recommended"],
  overrides: [
    {
      files: ["*.css"],
      rules: {
        "function-no-unknown": [true, { ignoreFunctions: ["theme"] }],
        "selector-class-pattern": [
          "^([a-z][a-z0-9]*)(-[a-z0-9]+)*$",
          {
            resolveNestedSelectors: true,
            message: (selector) =>
              `Expected class selector "${selector}" to be kebab-case`,
          },
        ],
      },
    },
    {
      files: ["*.module.css"],
      rules: {
        "selector-class-pattern": [
          "^[a-z][a-zA-Z0-9]+$",
          {
            message: (selector) =>
              `Expected class selector "${selector}" to be lowerCamelCase`,
          },
        ],
      },
    },
  ],
};
CITATION.cff (new file, 26 lines)
@@ -0,0 +1,26 @@
# This CITATION.cff file was generated with cffinit.
# Visit https://bit.ly/cffinit to generate yours today!

cff-version: 1.2.0
title: NoneBot
message: >-
  If you use this software, please cite it using the
  metadata from this file.
type: software
authors:
  - given-names: Yongyu
    family-names: Yan
    email: yyy@nonebot.dev
  - name: NoneBot Team
    email: contact@nonebot.dev
    website: 'https://github.com/nonebot'
repository-code: 'https://github.com/nonebot/nonebot2'
url: 'https://nonebot.dev/'
abstract: >-
  NoneBot, an asynchronous multi-platform chatbot framework
  written in Python
keywords:
  - nonebot
  - chatbot
  - pydantic
license: MIT
README.md (71 changed lines)
@@ -54,6 +54,9 @@ _✨ 跨平台 Python 异步机器人框架 ✨_
   <a href="https://onebot.dev/">
<img src="https://img.shields.io/badge/OneBot-v12-black?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABABAMAAABYR2ztAAAAIVBMVEUAAAAAAAADAwMHBwceHh4UFBQNDQ0ZGRkoKCgvLy8iIiLWSdWYAAAAAXRSTlMAQObYZgAAAQVJREFUSMftlM0RgjAQhV+0ATYK6i1Xb+iMd0qgBEqgBEuwBOxU2QDKsjvojQPvkJ/ZL5sXkgWrFirK4MibYUdE3OR2nEpuKz1/q8CdNxNQgthZCXYVLjyoDQftaKuniHHWRnPh2GCUetR2/9HsMAXyUT4/3UHwtQT2AggSCGKeSAsFnxBIOuAggdh3AKTL7pDuCyABcMb0aQP7aM4AnAbc/wHwA5D2wDHTTe56gIIOUA/4YYV2e1sg713PXdZJAuncdZMAGkAukU9OAn40O849+0ornPwT93rphWF0mgAbauUrEOthlX8Zu7P5A6kZyKCJy75hhw1Mgr9RAUvX7A3csGqZegEdniCx30c3agAAAABJRU5ErkJggg==" alt="onebot">
   </a>
+  <a href="https://bot.q.qq.com/wiki/">
<img src="https://img.shields.io/badge/QQ-Bot-lightgrey?style=social&logo=data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMTIuODIgMTMwLjg5Ij48ZyBkYXRhLW5hbWU9IuWbvuWxgiAyIj48ZyBkYXRhLW5hbWU9IuWbvuWxgiAxIj48cGF0aCBkPSJNNTUuNjMgMTMwLjhjLTcgMC0xMy45LjA4LTIwLjg2IDAtMTkuMTUtLjI1LTMxLjcxLTExLjQtMzQuMjItMzAuMy00LjA3LTMwLjY2IDE0LjkzLTU5LjIgNDQuODMtNjYuNjQgMi0uNTEgNS4yMS0uMzEgNS4yMS0xLjYzIDAtMi4xMy4xNC0yLjEzLjE0LTUuNTcgMC0uODktMS4zLTEuNDYtMi4yMi0yLjMxLTYuNzMtNi4yMy03LjY3LTEzLjQxLTEtMjAuMTggNS40LTUuNTIgMTEuODctNS40IDE3LjgtLjU5IDYuNDkgNS4yNiA2LjMxIDEzLjA4LS44NiAyMS0uNjguNzQtMS43OCAxLjYtMS43OCAyLjY3djQuMjFjMCAxLjM1IDIuMiAxLjYyIDQuNzkgMi4zNSAzMS4wOSA4LjY1IDQ4LjE3IDM0LjEzIDQ1IDY2LjM3LTEuNzYgMTguMTUtMTQuNTYgMzAuMjMtMzIuNyAzMC42My04LjAyLjE5LTE2LjA3LS4wMS0yNC4xMy0uMDF6IiBmaWxsPSIjMDI5OWZlIi8+PHBhdGggZD0iTTMxLjQ2IDExOC4zOGMtMTAuNS0uNjktMTYuOC02Ljg2LTE4LjM4LTE3LjI3LTMtMTkuNDIgMi43OC0zNS44NiAxOC40Ni00Ny44MyAxNC4xNi0xMC44IDI5Ljg3LTEyIDQ1LjM4LTMuMTkgMTcuMjUgOS44NCAyNC41OSAyNS44MSAyNCA0NS4yOS0uNDkgMTUuOS04LjQyIDIzLjE0LTI0LjM4IDIzLjUtNi41OS4xNC0xMy4xOSAwLTE5Ljc5IDAiIGZpbGw9IiNmZWZlZmUiLz48cGF0aCBkPSJNNDYuMDUgNzkuNThjLjA5IDUgLjIzIDkuODItNyA5Ljc3LTcuODItLjA2LTYuMS01LjY5LTYuMjQtMTAuMTktLjE1LTQuODItLjczLTEwIDYuNzMtOS44NHM2LjM3IDUuNTUgNi41MSAxMC4yNnoiIGZpbGw9IiMxMDlmZmUiLz48cGF0aCBkPSJNODAuMjcgNzkuMjdjLS41MyAzLjkxIDEuNzUgOS42NC01Ljg4IDEwLTcuNDcuMzctNi44MS00LjgyLTYuNjEtOS41LjItNC4zMi0xLjgzLTEwIDUuNzgtMTAuNDJzNi41OSA0Ljg5IDYuNzEgOS45MnoiIGZpbGw9IiMwODljZmUiLz48L2c+PC9nPjwvc3ZnPg==" alt="QQ">
+  </a>
   <a href="https://core.telegram.org/bots/api">
     <img src="https://img.shields.io/badge/telegram-Bot-lightgrey?style=social&logo=telegram" alt="telegram">
   </a>
@@ -63,9 +66,6 @@ _✨ 跨平台 Python 异步机器人框架 ✨_
   <a href="https://docs.github.com/en/developers/apps">
     <img src="https://img.shields.io/badge/GitHub-Bot-181717?style=social&logo=github" alt="github"/>
   </a>
-  <a href="https://bot.q.qq.com/wiki/">
<img src="https://img.shields.io/badge/QQ%E9%A2%91%E9%81%93-Bot-lightgrey?style=social&logo=data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMTIuODIgMTMwLjg5Ij48ZyBkYXRhLW5hbWU9IuWbvuWxgiAyIj48ZyBkYXRhLW5hbWU9IuWbvuWxgiAxIj48cGF0aCBkPSJNNTUuNjMgMTMwLjhjLTcgMC0xMy45LjA4LTIwLjg2IDAtMTkuMTUtLjI1LTMxLjcxLTExLjQtMzQuMjItMzAuMy00LjA3LTMwLjY2IDE0LjkzLTU5LjIgNDQuODMtNjYuNjQgMi0uNTEgNS4yMS0uMzEgNS4yMS0xLjYzIDAtMi4xMy4xNC0yLjEzLjE0LTUuNTcgMC0uODktMS4zLTEuNDYtMi4yMi0yLjMxLTYuNzMtNi4yMy03LjY3LTEzLjQxLTEtMjAuMTggNS40LTUuNTIgMTEuODctNS40IDE3LjgtLjU5IDYuNDkgNS4yNiA2LjMxIDEzLjA4LS44NiAyMS0uNjguNzQtMS43OCAxLjYtMS43OCAyLjY3djQuMjFjMCAxLjM1IDIuMiAxLjYyIDQuNzkgMi4zNSAzMS4wOSA4LjY1IDQ4LjE3IDM0LjEzIDQ1IDY2LjM3LTEuNzYgMTguMTUtMTQuNTYgMzAuMjMtMzIuNyAzMC42My04LjAyLjE5LTE2LjA3LS4wMS0yNC4xMy0uMDF6IiBmaWxsPSIjMDI5OWZlIi8+PHBhdGggZD0iTTMxLjQ2IDExOC4zOGMtMTAuNS0uNjktMTYuOC02Ljg2LTE4LjM4LTE3LjI3LTMtMTkuNDIgMi43OC0zNS44NiAxOC40Ni00Ny44MyAxNC4xNi0xMC44IDI5Ljg3LTEyIDQ1LjM4LTMuMTkgMTcuMjUgOS44NCAyNC41OSAyNS44MSAyNCA0NS4yOS0uNDkgMTUuOS04LjQyIDIzLjE0LTI0LjM4IDIzLjUtNi41OS4xNC0xMy4xOSAwLTE5Ljc5IDAiIGZpbGw9IiNmZWZlZmUiLz48cGF0aCBkPSJNNDYuMDUgNzkuNThjLjA5IDUgLjIzIDkuODItNyA5Ljc3LTcuODItLjA2LTYuMS01LjY5LTYuMjQtMTAuMTktLjE1LTQuODItLjczLTEwIDYuNzMtOS44NHM2LjM3IDUuNTUgNi41MSAxMC4yNnoiIGZpbGw9IiMxMDlmZmUiLz48cGF0aCBkPSJNODAuMjcgNzkuMjdjLS41MyAzLjkxIDEuNzUgOS42NC01Ljg4IDEwLTcuNDcuMzctNi44MS00LjgyLTYuNjEtOS41LjItNC4zMi0xLjgzLTEwIDUuNzgtMTAuNDJzNi41OSA0Ljg5IDYuNzEgOS45MnoiIGZpbGw9IiMwODljZmUiLz48L2c+PC9nPjwvc3ZnPg==" alt="QQ频道">
-  </a>
   <!-- <a href="https://ding-doc.dingtalk.com/document#/org-dev-guide/elzz1p">
<img src="https://img.shields.io/badge/%E9%92%89%E9%92%89-Bot-lightgrey?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAMAAACdt4HsAAAAnFBMVEUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD4jUzeAAAAM3RSTlMAQKSRaA+/f0YyFevh29R3cyklIfrlyrGsn41tVUs48c/HqJm9uZdhX1otGwkF9IN8V1CX0Q+IAAABY0lEQVRYw+3V2W7CMBAF0JuNQAhhX9OEfYdu9///rUVWpagE27Ef2gfO+0zGozsKnv6bMGzAhkNytIe5gDdzrwtTCwrbI8x4/NF668NAxgI3Q3UtFi3TyPwNQtPLUUmDd8YfqGLNe4v22XwEYb5zoOuF5baHq2UHtsKe5ivWfGAwrWu2mC34QM0PoCAuqZdOmiwV+5BLyMRtZ7dTSEcs48rzWfzwptMLyzpApka1SJ5FtR4kfCqNIBPEVDmqoqgwUYY5plQOlf6UEjNoOPnuKB6wzDyCrks///TDza8+PnR109WQdxLo8RKWq0PPnuXG0OXKQ6wWLFnCg75uYYbhmMIVVdQ709q33aHbGIj6Duz+2k1HQFX9VwqmY8xYsEJll2ahvhWgsjYLHFRXvIi2Qb0jzMQCzC3FAoydxCma88UCzE3JCWwkjCNYyMUCzHX4DiuTMawEwwhW6hnshPhjZzzJfAH0YacpbmRd7QAAAABJRU5ErkJggg==" alt="dingtalk"> -->
   </a>
@@ -94,7 +94,7 @@ _✨ 跨平台 Python 异步机器人框架 ✨_

 <p align="center">
   <a href="https://asciinema.org/a/569440">
-    <img src="https://nonebot.dev/img/setup.svg">
+    <img src="https://nonebot.dev/img/setup.svg" alt="setup" >
   </a>
 </p>

@@ -116,11 +116,13 @@ NoneBot2 是一个现代、跨平台、可扩展的 Python 聊天机器人框架
 | Telegram([仓库](https://github.com/nonebot/adapter-telegram),[协议](https://core.telegram.org/bots/api)) | ✅ | |
 | 飞书([仓库](https://github.com/nonebot/adapter-feishu),[协议](https://open.feishu.cn/document/home/index)) | ✅ | |
 | GitHub([仓库](https://github.com/nonebot/adapter-github),[协议](https://docs.github.com/en/apps)) | ✅ | GitHub APP & OAuth APP |
-| QQ 频道([仓库](https://github.com/nonebot/adapter-qqguild),[协议](https://bot.q.qq.com/wiki/)) | ✅ | 官方接口调整较多 |
+| QQ([仓库](https://github.com/nonebot/adapter-qq),[协议](https://bot.q.qq.com/wiki/)) | ✅ | QQ 官方接口调整较多 |
 | 钉钉([仓库](https://github.com/nonebot/adapter-ding),[协议](https://open.dingtalk.com/document/)) | 🤗 | 寻找 Maintainer(暂不可用) |
 | Console([仓库](https://github.com/nonebot/adapter-console)) | ✅ | 控制台交互 |
 | Red ([仓库](https://github.com/nonebot/adapter-red),[协议](https://chrononeko.github.io/QQNTRedProtocol/)) | ✅ | QQ 协议 |
+| Satori([仓库](https://github.com/nonebot/adapter-satori),[协议](https://satori.js.org/zh-CN)) | ✅ | 支持 Onebot、TG、飞书、微信公众号、Koishi 等 |
 | Discord ([仓库](https://github.com/nonebot/adapter-discord),[协议](https://discord.com/developers/docs/intro)) | ✅ | Discord Bot 协议 |
+| DoDo ([仓库](https://github.com/nonebot/adapter-dodo),[协议](https://open.imdodo.com/)) | ✅ | DoDo Bot 协议 |
 | 开黑啦([仓库](https://github.com/Tian-que/nonebot-adapter-kaiheila),[协议](https://developer.kookapp.cn/)) | ↗️ | 由社区贡献 |
 | Mirai([仓库](https://github.com/ieew/nonebot_adapter_mirai2),[协议](https://docs.mirai.mamoe.net/mirai-api-http/)) | ↗️ | QQ 协议,由社区贡献 |
 | Ntchat([仓库](https://github.com/JustUndertaker/adapter-ntchat)) | ↗️ | 微信协议,由社区贡献 |
@@ -206,9 +208,8 @@ NoneBot2 不是 NoneBot1 的替代品。事实上,它们都在被积极的维
 或者尝试以下镜像:

-- [文档镜像(中国境内)](https://nb2.baka.icu)
 - [文档镜像(Vercel)](https://nonebot2-vercel-mirror.vercel.app)

-- 其他插件请查看 [商店](https://nonebot.dev/store)
+- 其他插件请查看 [商店](https://nonebot.dev/store/plugins)

 ## 许可证

@@ -227,10 +228,62 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

 请参考 [贡献指南](./CONTRIBUTING.md)

-### 鸣谢
+## 鸣谢

+### 赞助者
+
+感谢以下产品对 NoneBot 项目提供的赞助:
+
+<p align="center">
+  <a href="https://github.com/">
+    <picture>
+      <source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/github-dark.png">
+      <img src="https://assets.nonebot.dev/github-light.png" height="50" alt="GitHub">
+    </picture>
+  </a>
+  <a href="https://www.netlify.com/">
+    <picture>
+      <source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/netlify-dark.svg">
+      <img src="https://assets.nonebot.dev/netlify-light.svg" height="50" alt="netlify">
+    </picture>
+  </a>
+  <a href="https://sentry.io/">
+    <picture>
+      <source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/sentry-dark.svg">
+      <img src="https://assets.nonebot.dev/sentry-light.svg" height="50" alt="sentry">
+    </picture>
+  </a>
+</p>
+<p align="center">
+  <a href="https://www.docker.com/">
+    <picture>
+      <source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/docker-dark.svg">
+      <img src="https://assets.nonebot.dev/docker-light.svg" height="50" alt="docker">
+    </picture>
+  </a>
+  <a href="https://www.algolia.com/">
+    <picture>
+      <source media="(prefers-color-scheme: dark)" srcset="https://assets.nonebot.dev/algolia-dark.svg">
+      <img src="https://assets.nonebot.dev/algolia-light.svg" height="50" alt="algolia">
+    </picture>
+  </a>
+</p>
+<p align="center">
+  <a href="https://www.jetbrains.com/">
+    <img src="https://resources.jetbrains.com/storage/products/company/brand/logos/jb_beam.svg" height="80" alt="JetBrains" >
+  </a>
+</p>
+
+感谢以下赞助者对 NoneBot 项目提供的资金支持:

 <a href="https://assets.nonebot.dev/sponsors.svg">
   <img src="https://assets.nonebot.dev/sponsors.svg" alt="sponsors" />
 </a>

 ### 开发者

 感谢以下开发者对 NoneBot2 作出的贡献:

 <a href="https://github.com/nonebot/nonebot2/graphs/contributors">
-  <img src="https://contrib.rocks/image?repo=nonebot/nonebot2&max=1000" />
+  <img src="https://contrib.rocks/image?repo=nonebot/nonebot2&max=1000" alt="contributors" />
 </a>
@@ -40,12 +40,12 @@
     "is_official": true
   },
   {
-    "module_name": "nonebot.adapters.qqguild",
-    "project_link": "nonebot-adapter-qqguild",
-    "name": "QQ 频道",
-    "desc": "QQ 频道官方机器人",
+    "module_name": "nonebot.adapters.qq",
+    "project_link": "nonebot-adapter-qq",
+    "name": "QQ",
+    "desc": "QQ 官方机器人",
     "author": "yanyongyu",
-    "homepage": "https://github.com/nonebot/adapter-qqguild",
+    "homepage": "https://github.com/nonebot/adapter-qq",
     "tags": [],
     "is_official": true
   },
@@ -188,5 +188,30 @@
     "homepage": "https://github.com/nonebot/adapter-discord",
     "tags": [],
     "is_official": true
   },
+  {
+    "module_name": "nonebot.adapters.satori",
+    "project_link": "nonebot-adapter-satori",
+    "name": "Satori",
+    "desc": "Satori 协议适配器",
+    "author": "RF-Tar-Railt",
+    "homepage": "https://github.com/nonebot/adapter-satori",
+    "tags": [
+      {
+        "label": "跨平台",
+        "color": "#bf40bf"
+      }
+    ],
+    "is_official": true
+  },
+  {
+    "module_name": "nonebot.adapters.dodo",
+    "project_link": "nonebot-adapter-dodo",
+    "name": "DoDo",
+    "desc": "DoDo Bot 协议适配器",
+    "author": "CMHopeSunshine",
+    "homepage": "https://github.com/nonebot/adapter-dodo",
+    "tags": [],
+    "is_official": true
+  }
 ]
@@ -574,5 +574,38 @@
       }
     ],
     "is_official": false
   },
+  {
+    "name": "妃爱",
+    "desc": "超可爱的妃爱QQ群聊机器人",
+    "author": "jiangyuxiaoxiao",
+    "homepage": "https://github.com/jiangyuxiaoxiao/Hiyori",
+    "tags": [],
+    "is_official": false
+  },
+  {
+    "name": "芙芙",
+    "desc": "供 Mooncell Wiki 协作使用的跨平台机器人",
+    "author": "StarHeartHunt",
+    "homepage": "https://github.com/MooncellWiki/BotFooChan",
+    "tags": [],
+    "is_official": false
+  },
+  {
+    "name": "Sakiko",
+    "desc": "基于 LiteLoaderBDS 的 Minecraft 基岩版 Bot",
+    "author": "zhaomaoniu",
+    "homepage": "https://github.com/zhaomaoniu/Sakiko",
+    "tags": [
+      {
+        "label": "Minecraft",
+        "color": "#6cc349"
+      },
+      {
+        "label": "BanGDream",
+        "color": "#e70050"
+      }
+    ],
+    "is_official": false
+  }
 ]
assets/plugins.json (new file, 5504 lines). File diff suppressed because it is too large.
envs/pydantic-v1/poetry.lock (generated, new file, 2166 lines). File diff suppressed because it is too large.
envs/pydantic-v1/pyproject.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[tool.poetry]
name = "nonebot-pydantic-v1"
version = "0.1.0"
description = "Private pydantic v1 test env for nonebot"
authors = ["yanyongyu <yyy@nonebot.dev>"]
license = "MIT"

[tool.poetry.dependencies]
python = "^3.8"

[tool.poetry.group.dev.dependencies]
pydantic = "^1.0.0"
nonebot-test = { path = "../test/", develop = false }
nonebot2 = { path = "../../", extras = ["all"], develop = true }

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
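The envs/pydantic-v1 and envs/pydantic-v2 projects above pin opposite pydantic majors against the same editable nonebot2 checkout, so the test suite and type checks can run once per major version. As a rough illustration of why that split matters (this is not code from the repository; the `PYDANTIC_V2` flag and `dump_model` helper below are hypothetical), code that must work under both majors can branch on the installed pydantic version:

```python
# Hypothetical sketch: branch on the installed pydantic major version.
# NoneBot's real compatibility layer is not reproduced here.
from pydantic import VERSION as PYDANTIC_VERSION

# True when pydantic 2.x is installed (e.g. inside envs/pydantic-v2).
PYDANTIC_V2: bool = int(PYDANTIC_VERSION.split(".", 1)[0]) >= 2


def dump_model(model) -> dict:
    """Serialize a pydantic model under either major version."""
    if PYDANTIC_V2:
        return model.model_dump()  # pydantic >= 2.0
    return model.dict()  # pydantic 1.x
```

The `sed` switch of `PYDANTIC_V2 = true/false` in pyproject.toml that the pyright workflow performs appears to serve the same purpose for static type checking.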
envs/pydantic-v2/poetry.lock (generated, new file, 2238 lines). File diff suppressed because it is too large.
envs/pydantic-v2/pyproject.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[tool.poetry]
name = "nonebot-pydantic-v2"
version = "0.1.0"
description = "Private pydantic v2 test env for nonebot"
authors = ["yanyongyu <yyy@nonebot.dev>"]
license = "MIT"

[tool.poetry.dependencies]
python = "^3.8"

[tool.poetry.group.dev.dependencies]
pydantic = "^2.0.0"
nonebot-test = { path = "../test/", develop = false }
nonebot2 = { path = "../../", extras = ["all"], develop = true }

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
envs/test/nonebot-test.py (new file, 1 line)
@@ -0,0 +1 @@
# fake file to make project installable
envs/test/poetry.lock (generated, new file, 957 lines)
@@ -0,0 +1,957 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "asgiref"
|
||||
version = "3.7.2"
|
||||
description = "ASGI specs, helper code, and adapters"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"},
|
||||
{file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
|
||||
|
||||
[[package]]
|
||||
name = "async-asgi-testclient"
|
||||
version = "1.4.11"
|
||||
description = "Async client for testing ASGI web applications"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "async-asgi-testclient-1.4.11.tar.gz", hash = "sha256:4449ac85d512d661998ec61f91c9ae01851639611d748d81ae7f816736551792"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
multidict = ">=4.0,<7.0"
|
||||
requests = ">=2.21,<3.0"
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2024.2.2"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
|
||||
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.3.2"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
|
||||
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
description = "Cross-platform colored terminal text."
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.4.1"
|
||||
description = "Code coverage measurement for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"},
|
||||
{file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"},
|
||||
{file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"},
|
||||
{file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"},
|
||||
{file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"},
{file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"},
{file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"},
{file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"},
{file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"},
{file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"},
{file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"},
{file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"},
{file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"},
{file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"},
{file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"},
{file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"},
{file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"},
{file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"},
{file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"},
{file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"},
{file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"},
{file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"},
{file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"},
{file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"},
{file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"},
{file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"},
{file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"},
{file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"},
{file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"},
{file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"},
{file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"},
{file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"},
{file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"},
{file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"},
{file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"},
{file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"},
{file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"},
{file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"},
{file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"},
{file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"},
{file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"},
{file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"},
{file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"},
{file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"},
{file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"},
{file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"},
{file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"},
{file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"},
{file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"},
{file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"},
{file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"},
{file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"},
]
[package.dependencies]
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
toml = ["tomli"]
[[package]]
name = "coverage-conditional-plugin"
version = "0.9.0"
description = "Conditional coverage based on any rules you define!"
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "coverage_conditional_plugin-0.9.0-py3-none-any.whl", hash = "sha256:1b37bc469019d2ab5b01f5eee453abe1846b3431e64e209720c2a9ec4afb8130"},
{file = "coverage_conditional_plugin-0.9.0.tar.gz", hash = "sha256:6893dab0542695dbd5ea714281dae0dfec8d0e36480ba32d839e9fa7344f8215"},
]
[package.dependencies]
coverage = ">=7,<8"
importlib_metadata = {version = "*", markers = "python_version < \"3.10\""}
packaging = ">=20.4"
[[package]]
name = "exceptiongroup"
version = "1.2.0"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
{file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
{file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "execnet"
version = "2.0.2"
description = "execnet: rapid multi-Python deployment"
optional = false
python-versions = ">=3.7"
files = [
{file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
{file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
]
[package.extras]
testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]]
name = "h11"
version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
[[package]]
name = "idna"
version = "3.6"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
]
[[package]]
name = "importlib-metadata"
version = "7.0.1"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"},
{file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "loguru"
version = "0.7.2"
description = "Python logging made (stupidly) simple"
optional = false
python-versions = ">=3.5"
files = [
{file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
{file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
]
[package.dependencies]
colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
[package.extras]
dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
[[package]]
name = "markupsafe"
version = "2.1.5"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.7"
files = [
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
{file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
{file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
{file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
{file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
{file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
{file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
{file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
{file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
{file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
[[package]]
name = "multidict"
version = "6.0.5"
description = "multidict implementation"
optional = false
python-versions = ">=3.7"
files = [
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
{file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
{file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
{file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
{file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
{file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
{file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
{file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
{file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
{file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
{file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
{file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
{file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
{file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
{file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
{file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
{file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
{file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
{file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
{file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
{file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
{file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
{file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
{file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
{file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
{file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
{file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
{file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
{file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
]
[[package]]
name = "nonebot2"
version = "2.1.3"
description = "An asynchronous python bot framework."
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "nonebot2-2.1.3-py3-none-any.whl", hash = "sha256:c36c1a60ce4355d9777fee431c08619f22ffd60f7060993fbbbd1fe67b6368f7"},
{file = "nonebot2-2.1.3.tar.gz", hash = "sha256:e750e615f1ad2503721ce055fbe55ec3b061277135d995be112fecd27f7232e5"},
]
[package.dependencies]
loguru = ">=0.6.0,<1.0.0"
pydantic = {version = ">=1.10.0,<2.0.0", extras = ["dotenv"]}
pygtrie = ">=2.4.1,<3.0.0"
tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=4.4.0,<5.0.0"
yarl = ">=1.7.2,<2.0.0"
[package.extras]
aiohttp = ["aiohttp[speedups] (>=3.9.0b0,<4.0.0)"]
all = ["Quart (>=0.18.0,<1.0.0)", "aiohttp[speedups] (>=3.9.0b0,<4.0.0)", "fastapi (>=0.93.0,<1.0.0)", "httpx[http2] (>=0.20.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)", "websockets (>=10.0)"]
fastapi = ["fastapi (>=0.93.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)"]
httpx = ["httpx[http2] (>=0.20.0,<1.0.0)"]
quart = ["Quart (>=0.18.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)"]
websockets = ["websockets (>=10.0)"]
[[package]]
name = "nonebug"
version = "0.3.5"
description = "nonebot2 test framework"
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "nonebug-0.3.5-py3-none-any.whl", hash = "sha256:588831b08b3ea42d058874214bedae646e2ab8c1ec4ae1540ff789873107a8fa"},
{file = "nonebug-0.3.5.tar.gz", hash = "sha256:4d4bf9448cd1cbfaaabaab73dbe4ac8757e86dd92a41ef79cdece8dd61e724e2"},
]
[package.dependencies]
asgiref = ">=3.4.0,<4.0.0"
async-asgi-testclient = ">=1.4.8,<2.0.0"
nonebot2 = ">=2.0.0-rc.2,<3.0.0"
pytest = ">=7.0.0,<8.0.0"
typing-extensions = ">=4.0.0,<5.0.0"
[[package]]
name = "packaging"
version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
name = "pluggy"
version = "1.4.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
{file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pydantic"
version = "1.10.14"
description = "Data validation and settings management using python type hints"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"},
{file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"},
{file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"},
{file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"},
{file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"},
{file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"},
{file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"},
{file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"},
{file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"},
{file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"},
{file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"},
{file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"},
{file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"},
{file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"},
{file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"},
{file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"},
{file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"},
{file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"},
{file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"},
{file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"},
{file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"},
{file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"},
{file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"},
{file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"},
{file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"},
{file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"},
{file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"},
{file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"},
{file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"},
{file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"},
{file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"},
{file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"},
{file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"},
{file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"},
{file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"},
{file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"},
]
[package.dependencies]
python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""}
typing-extensions = ">=4.2.0"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pygtrie"
version = "2.5.0"
description = "A pure Python trie data structure implementation."
optional = false
python-versions = "*"
files = [
{file = "pygtrie-2.5.0-py3-none-any.whl", hash = "sha256:8795cda8105493d5ae159a5bef313ff13156c5d4d72feddefacaad59f8c8ce16"},
{file = "pygtrie-2.5.0.tar.gz", hash = "sha256:203514ad826eb403dab1d2e2ddd034e0d1534bbe4dbe0213bb0593f66beba4e2"},
]
[[package]]
name = "pytest"
version = "7.4.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
{file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.23.4"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-asyncio-0.23.4.tar.gz", hash = "sha256:2143d9d9375bf372a73260e4114541485e84fca350b0b6b92674ca56ff5f7ea2"},
{file = "pytest_asyncio-0.23.4-py3-none-any.whl", hash = "sha256:b0079dfac14b60cd1ce4691fbfb1748fe939db7d0234b5aba97197d10fbe0fef"},
]
[package.dependencies]
pytest = ">=7.0.0,<8"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "pytest-cov"
version = "4.1.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
{file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
]
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "pytest-xdist"
version = "3.5.0"
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"},
{file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"},
]
[package.dependencies]
execnet = ">=1.1"
pytest = ">=6.2.0"
[package.extras]
psutil = ["psutil (>=3.0)"]
setproctitle = ["setproctitle"]
testing = ["filelock"]
[[package]]
name = "python-dotenv"
version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
]
[package.extras]
cli = ["click (>=5.0)"]
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "typing-extensions"
version = "4.9.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
{file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
]
[[package]]
name = "urllib3"
version = "2.2.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
{file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"},
{file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "werkzeug"
version = "3.0.1"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.8"
files = [
{file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"},
{file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"},
]
[package.dependencies]
MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
[[package]]
name = "win32-setctime"
version = "1.1.0"
description = "A small Python utility to set file creation time on Windows"
optional = false
python-versions = ">=3.5"
files = [
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
]
[package.extras]
dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
[[package]]
name = "wsproto"
version = "1.2.0"
description = "WebSockets state-machine based protocol implementation"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
{file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
]
[package.dependencies]
h11 = ">=0.9.0,<1"
[[package]]
name = "yarl"
version = "1.9.4"
description = "Yet another URL library"
optional = false
python-versions = ">=3.7"
files = [
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
{file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
|
||||
{file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
|
||||
{file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
|
||||
{file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
|
||||
{file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
|
||||
{file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
|
||||
{file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
|
||||
{file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
|
||||
{file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
]

[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"

[[package]]
name = "zipp"
version = "3.17.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
{file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
{file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
]

[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]

[metadata]
lock-version = "2.0"
python-versions = "^3.8"
content-hash = "ab5729309587cb130ac7848e8862368995372cf2fc91d0966598b3c6b49028e5"
21
envs/test/pyproject.toml
Normal file
@@ -0,0 +1,21 @@
[tool.poetry]
name = "nonebot-test"
version = "0.1.0"
description = "Private test env for nonebot"
authors = ["yanyongyu <yyy@nonebot.dev>"]
license = "MIT"
packages = [{ include = "nonebot-test.py" }]

[tool.poetry.dependencies]
python = "^3.8"
nonebug = "^0.3.0"
wsproto = "^1.2.0"
pytest-cov = "^4.0.0"
pytest-xdist = "^3.0.2"
pytest-asyncio = "^0.23.2"
werkzeug = ">=2.3.6,<4.0.0"
coverage-conditional-plugin = "^0.9.0"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
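
The test env above pairs nonebug with pytest-xdist, pytest-cov and pytest-asyncio; a hypothetical local runner for it might look like the sketch below (the flags come from those plugins, the tests/ path is an assumption):

import sys

import pytest

if __name__ == "__main__":
    sys.exit(
        pytest.main(
            [
                "-n", "auto",                 # pytest-xdist: spread tests over all CPU cores
                "--cov=nonebot",              # pytest-cov: measure coverage of the nonebot package
                "--cov-report=term-missing",
                "tests/",                     # assumed test directory
            ]
        )
    )
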
@@ -35,6 +35,7 @@
|
||||
{ref}``get_loaded_plugins` <nonebot.plugin.get_loaded_plugins>`
|
||||
- `get_available_plugin_names` =>
|
||||
{ref}``get_available_plugin_names` <nonebot.plugin.get_available_plugin_names>`
|
||||
- `get_plugin_config` => {ref}``get_plugin_config` <nonebot.plugin.get_plugin_config>`
|
||||
- `require` => {ref}``require` <nonebot.plugin.load.require>`
|
||||
|
||||
FrontMatter:
|
||||
@@ -47,11 +48,10 @@ from importlib.metadata import version
|
||||
from typing import Any, Dict, Type, Union, TypeVar, Optional, overload
|
||||
|
||||
import loguru
|
||||
from pydantic.env_settings import DotenvType
|
||||
|
||||
from nonebot.config import Env, Config
|
||||
from nonebot.log import logger as logger
|
||||
from nonebot.adapters import Bot, Adapter
|
||||
from nonebot.config import DOTENV_TYPE, Env, Config
|
||||
from nonebot.utils import escape_tag, resolve_dot_notation
|
||||
from nonebot.drivers import Driver, ASGIMixin, combine_driver
|
||||
|
||||
@@ -170,7 +170,7 @@ def get_app() -> Any:
|
||||
|
||||
|
||||
def get_asgi() -> Any:
|
||||
"""获取全局 {ref}`nonebot.drivers.ASGIMixin` 对应
|
||||
"""获取全局 {ref}`nonebot.drivers.ASGIMixin` 对应的
|
||||
[ASGI](https://asgi.readthedocs.io/) 对象。
|
||||
|
||||
返回:
|
||||
@@ -189,7 +189,7 @@ def get_asgi() -> Any:
|
||||
driver = get_driver()
|
||||
assert isinstance(
|
||||
driver, ASGIMixin
|
||||
), "asgi object is only available for reverse driver"
|
||||
), "asgi object is only available for asgi driver"
|
||||
return driver.asgi
|
||||
|
||||
|
||||
@@ -273,7 +273,7 @@ def _log_patcher(record: "loguru.Record"):
|
||||
)
|
||||
|
||||
|
||||
def init(*, _env_file: Optional[DotenvType] = None, **kwargs: Any) -> None:
|
||||
def init(*, _env_file: Optional[DOTENV_TYPE] = None, **kwargs: Any) -> None:
|
||||
"""初始化 NoneBot 以及 全局 {ref}`nonebot.drivers.Driver` 对象。
|
||||
|
||||
NoneBot 将会从 .env 文件中读取环境信息,并使用相应的 env 文件配置。
|
||||
@@ -296,9 +296,11 @@ def init(*, _env_file: Optional[DotenvType] = None, **kwargs: Any) -> None:
|
||||
_env_file = _env_file or f".env.{env.environment}"
|
||||
config = Config(
|
||||
**kwargs,
|
||||
_env_file=(".env", _env_file)
|
||||
if isinstance(_env_file, (str, os.PathLike))
|
||||
else _env_file,
|
||||
_env_file=(
|
||||
(".env", _env_file)
|
||||
if isinstance(_env_file, (str, os.PathLike))
|
||||
else _env_file
|
||||
),
|
||||
)
|
||||
|
||||
logger.configure(
|
||||
@@ -353,10 +355,9 @@ from nonebot.plugin import load_from_json as load_from_json
|
||||
from nonebot.plugin import load_from_toml as load_from_toml
|
||||
from nonebot.plugin import load_all_plugins as load_all_plugins
|
||||
from nonebot.plugin import on_shell_command as on_shell_command
|
||||
from nonebot.plugin import get_plugin_config as get_plugin_config
|
||||
from nonebot.plugin import get_loaded_plugins as get_loaded_plugins
|
||||
from nonebot.plugin import load_builtin_plugin as load_builtin_plugin
|
||||
from nonebot.plugin import load_builtin_plugins as load_builtin_plugins
|
||||
from nonebot.plugin import get_plugin_by_module_name as get_plugin_by_module_name
|
||||
from nonebot.plugin import get_available_plugin_names as get_available_plugin_names
|
||||
|
||||
__autodoc__ = {"internal": False}
|
||||
|
351
nonebot/compat.py
Normal file
@@ -0,0 +1,351 @@
|
||||
"""本模块为 Pydantic 版本兼容层模块
|
||||
|
||||
为兼容 Pydantic V1 与 V2 版本,定义了一系列兼容函数与类供使用。
|
||||
|
||||
FrontMatter:
|
||||
sidebar_position: 16
|
||||
description: nonebot.compat 模块
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass, is_dataclass
|
||||
from typing_extensions import Self, Annotated, get_args, get_origin, is_typeddict
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Set,
|
||||
Dict,
|
||||
List,
|
||||
Type,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Protocol,
|
||||
Generator,
|
||||
)
|
||||
|
||||
from pydantic import VERSION, BaseModel
|
||||
|
||||
from nonebot.typing import origin_is_annotated
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
PYDANTIC_V2 = int(VERSION.split(".", 1)[0]) == 2
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class _CustomValidationClass(Protocol):
|
||||
@classmethod
|
||||
def __get_validators__(cls) -> Generator[Callable[..., Any], None, None]: ...
|
||||
|
||||
CVC = TypeVar("CVC", bound=_CustomValidationClass)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"Required",
|
||||
"PydanticUndefined",
|
||||
"PydanticUndefinedType",
|
||||
"ConfigDict",
|
||||
"DEFAULT_CONFIG",
|
||||
"FieldInfo",
|
||||
"ModelField",
|
||||
"extract_field_info",
|
||||
"model_field_validate",
|
||||
"model_fields",
|
||||
"model_config",
|
||||
"model_dump",
|
||||
"type_validate_python",
|
||||
"custom_validation",
|
||||
)
|
||||
|
||||
__autodoc__ = {
|
||||
"PydanticUndefined": "Pydantic Undefined object",
|
||||
"PydanticUndefinedType": "Pydantic Undefined type",
|
||||
}
|
||||
|
||||
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
from pydantic_core import CoreSchema, core_schema
|
||||
from pydantic._internal._repr import display_as_type
|
||||
from pydantic import TypeAdapter, GetCoreSchemaHandler
|
||||
from pydantic.fields import FieldInfo as BaseFieldInfo
|
||||
|
||||
Required = Ellipsis
|
||||
"""Alias of Ellipsis for compatibility with pydantic v1"""
|
||||
|
||||
# Export undefined type
|
||||
from pydantic_core import PydanticUndefined as PydanticUndefined
|
||||
from pydantic_core import PydanticUndefinedType as PydanticUndefinedType
|
||||
|
||||
# isort: split
|
||||
|
||||
# Export model config dict
|
||||
from pydantic import ConfigDict as ConfigDict
|
||||
|
||||
DEFAULT_CONFIG = ConfigDict(extra="allow", arbitrary_types_allowed=True)
|
||||
"""Default config for validations"""
|
||||
|
||||
class FieldInfo(BaseFieldInfo):
|
||||
"""FieldInfo class with extra property for compatibility with pydantic v1"""
|
||||
|
||||
# make default can be positional argument
|
||||
def __init__(self, default: Any = PydanticUndefined, **kwargs: Any) -> None:
|
||||
super().__init__(default=default, **kwargs)
|
||||
|
||||
@property
|
||||
def extra(self) -> Dict[str, Any]:
|
||||
"""Extra data that is not part of the standard pydantic fields.
|
||||
|
||||
For compatibility with pydantic v1.
|
||||
"""
|
||||
# extract extra data from attributes set except used slots
|
||||
# we need to call super in advance due to
|
||||
# comprehension not inlined in cpython < 3.12
|
||||
# https://peps.python.org/pep-0709/
|
||||
slots = super().__slots__
|
||||
return {k: v for k, v in self._attributes_set.items() if k not in slots}
|
||||
|
||||
@dataclass
|
||||
class ModelField:
|
||||
"""ModelField class for compatibility with pydantic v1"""
|
||||
|
||||
name: str
|
||||
"""The name of the field."""
|
||||
annotation: Any
|
||||
"""The annotation of the field."""
|
||||
field_info: FieldInfo
|
||||
"""The FieldInfo of the field."""
|
||||
|
||||
@classmethod
|
||||
def _construct(cls, name: str, annotation: Any, field_info: FieldInfo) -> Self:
|
||||
return cls(name, annotation, field_info)
|
||||
|
||||
@classmethod
|
||||
def construct(
|
||||
cls, name: str, annotation: Any, field_info: Optional[FieldInfo] = None
|
||||
) -> Self:
|
||||
"""Construct a ModelField from given infos."""
|
||||
return cls._construct(name, annotation, field_info or FieldInfo())
|
||||
|
||||
def _annotation_has_config(self) -> bool:
|
||||
"""Check if the annotation has config.
|
||||
|
||||
TypeAdapter raise error when annotation has config
|
||||
and given config is not None.
|
||||
"""
|
||||
type_is_annotated = origin_is_annotated(get_origin(self.annotation))
|
||||
inner_type = (
|
||||
get_args(self.annotation)[0] if type_is_annotated else self.annotation
|
||||
)
|
||||
try:
|
||||
return (
|
||||
issubclass(inner_type, BaseModel)
|
||||
or is_dataclass(inner_type)
|
||||
or is_typeddict(inner_type)
|
||||
)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
def get_default(self) -> Any:
|
||||
"""Get the default value of the field."""
|
||||
return self.field_info.get_default(call_default_factory=True)
|
||||
|
||||
def _type_display(self):
|
||||
"""Get the display of the type of the field."""
|
||||
return display_as_type(self.annotation)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
# Each ModelField is unique for our purposes,
|
||||
# to allow store them in a set.
|
||||
return id(self)
|
||||
|
||||
def extract_field_info(field_info: BaseFieldInfo) -> Dict[str, Any]:
|
||||
"""Get FieldInfo init kwargs from a FieldInfo instance."""
|
||||
|
||||
kwargs = field_info._attributes_set.copy()
|
||||
kwargs["annotation"] = field_info.rebuild_annotation()
|
||||
return kwargs
|
||||
|
||||
def model_field_validate(
|
||||
model_field: ModelField, value: Any, config: Optional[ConfigDict] = None
|
||||
) -> Any:
|
||||
"""Validate the value pass to the field."""
|
||||
type: Any = Annotated[model_field.annotation, model_field.field_info]
|
||||
return TypeAdapter(
|
||||
type, config=None if model_field._annotation_has_config() else config
|
||||
).validate_python(value)
|
||||
|
||||
def model_fields(model: Type[BaseModel]) -> List[ModelField]:
|
||||
"""Get field list of a model."""
|
||||
|
||||
return [
|
||||
ModelField._construct(
|
||||
name=name,
|
||||
annotation=field_info.rebuild_annotation(),
|
||||
field_info=FieldInfo(**extract_field_info(field_info)),
|
||||
)
|
||||
for name, field_info in model.model_fields.items()
|
||||
]
|
||||
|
||||
def model_config(model: Type[BaseModel]) -> Any:
|
||||
"""Get config of a model."""
|
||||
return model.model_config
|
||||
|
||||
def model_dump(
|
||||
model: BaseModel,
|
||||
include: Optional[Set[str]] = None,
|
||||
exclude: Optional[Set[str]] = None,
|
||||
) -> Dict[str, Any]:
|
||||
return model.model_dump(include=include, exclude=exclude)
|
||||
|
||||
def type_validate_python(type_: Type[T], data: Any) -> T:
|
||||
"""Validate data with given type."""
|
||||
return TypeAdapter(type_).validate_python(data)
|
||||
|
||||
def __get_pydantic_core_schema__(
|
||||
cls: Type["_CustomValidationClass"],
|
||||
source_type: Any,
|
||||
handler: GetCoreSchemaHandler,
|
||||
) -> CoreSchema:
|
||||
validators = list(cls.__get_validators__())
|
||||
if len(validators) == 1:
|
||||
return core_schema.no_info_plain_validator_function(validators[0])
|
||||
return core_schema.chain_schema(
|
||||
[core_schema.no_info_plain_validator_function(func) for func in validators]
|
||||
)
|
||||
|
||||
def custom_validation(class_: Type["CVC"]) -> Type["CVC"]:
|
||||
"""Use pydantic v1 like validator generator in pydantic v2"""
|
||||
|
||||
setattr(
|
||||
class_,
|
||||
"__get_pydantic_core_schema__",
|
||||
classmethod(__get_pydantic_core_schema__),
|
||||
)
|
||||
return class_
|
||||
|
||||
else: # pragma: pydantic-v1
|
||||
from pydantic import Extra
|
||||
from pydantic import parse_obj_as
|
||||
from pydantic import BaseConfig as PydanticConfig
|
||||
from pydantic.fields import FieldInfo as BaseFieldInfo
|
||||
from pydantic.fields import ModelField as BaseModelField
|
||||
from pydantic.schema import get_annotation_from_field_info
|
||||
|
||||
# isort: split
|
||||
|
||||
from pydantic.fields import Required as Required
|
||||
|
||||
# isort: split
|
||||
|
||||
from pydantic.fields import Undefined as PydanticUndefined
|
||||
from pydantic.fields import UndefinedType as PydanticUndefinedType
|
||||
|
||||
class ConfigDict(PydanticConfig):
|
||||
"""Config class that allow get value with default value."""
|
||||
|
||||
@classmethod
|
||||
def get(cls, field: str, default: Any = None) -> Any:
|
||||
"""Get a config value."""
|
||||
return getattr(cls, field, default)
|
||||
|
||||
class DEFAULT_CONFIG(ConfigDict):
|
||||
extra = Extra.allow
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
class FieldInfo(BaseFieldInfo):
|
||||
def __init__(self, default: Any = PydanticUndefined, **kwargs: Any):
|
||||
# preprocess default value to make it compatible with pydantic v2
|
||||
# when default is Required, set it to PydanticUndefined
|
||||
if default is Required:
|
||||
default = PydanticUndefined
|
||||
super().__init__(default, **kwargs)
|
||||
|
||||
class ModelField(BaseModelField):
|
||||
@classmethod
|
||||
def _construct(cls, name: str, annotation: Any, field_info: FieldInfo) -> Self:
|
||||
return cls(
|
||||
name=name,
|
||||
type_=annotation,
|
||||
class_validators=None,
|
||||
model_config=DEFAULT_CONFIG,
|
||||
default=field_info.default,
|
||||
default_factory=field_info.default_factory,
|
||||
required=(
|
||||
field_info.default is PydanticUndefined
|
||||
and field_info.default_factory is None
|
||||
),
|
||||
field_info=field_info,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def construct(
|
||||
cls, name: str, annotation: Any, field_info: Optional[FieldInfo] = None
|
||||
) -> Self:
|
||||
"""Construct a ModelField from given infos.
|
||||
|
||||
Field annotation is preprocessed with field_info.
|
||||
"""
|
||||
if field_info is not None:
|
||||
annotation = get_annotation_from_field_info(
|
||||
annotation, field_info, name
|
||||
)
|
||||
return cls._construct(name, annotation, field_info or FieldInfo())
|
||||
|
||||
def extract_field_info(field_info: BaseFieldInfo) -> Dict[str, Any]:
|
||||
"""Get FieldInfo init kwargs from a FieldInfo instance."""
|
||||
|
||||
kwargs = {
|
||||
s: getattr(field_info, s) for s in field_info.__slots__ if s != "extra"
|
||||
}
|
||||
kwargs.update(field_info.extra)
|
||||
return kwargs
|
||||
|
||||
def model_field_validate(
|
||||
model_field: ModelField, value: Any, config: Optional[Type[ConfigDict]] = None
|
||||
) -> Any:
|
||||
"""Validate the value pass to the field.
|
||||
|
||||
Set config before validate to ensure validate correctly.
|
||||
"""
|
||||
|
||||
if model_field.model_config is not config:
|
||||
model_field.set_config(config or ConfigDict)
|
||||
|
||||
v, errs_ = model_field.validate(value, {}, loc=())
|
||||
if errs_:
|
||||
raise ValueError(value, model_field)
|
||||
return v
|
||||
|
||||
def model_fields(model: Type[BaseModel]) -> List[ModelField]:
|
||||
"""Get field list of a model."""
|
||||
|
||||
# construct the model field without preprocess to avoid error
|
||||
return [
|
||||
ModelField._construct(
|
||||
name=model_field.name,
|
||||
annotation=model_field.annotation,
|
||||
field_info=FieldInfo(
|
||||
**extract_field_info(model_field.field_info),
|
||||
),
|
||||
)
|
||||
for model_field in model.__fields__.values()
|
||||
]
|
||||
|
||||
def model_config(model: Type[BaseModel]) -> Any:
|
||||
"""Get config of a model."""
|
||||
return model.__config__
|
||||
|
||||
def model_dump(
|
||||
model: BaseModel,
|
||||
include: Optional[Set[str]] = None,
|
||||
exclude: Optional[Set[str]] = None,
|
||||
) -> Dict[str, Any]:
|
||||
return model.dict(include=include, exclude=exclude)
|
||||
|
||||
def type_validate_python(type_: Type[T], data: Any) -> T:
|
||||
"""Validate data with given type."""
|
||||
return parse_obj_as(type_, data)
|
||||
|
||||
def custom_validation(class_: Type["CVC"]) -> Type["CVC"]:
|
||||
"""Do nothing in pydantic v1"""
|
||||
return class_
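
A short usage sketch of the compat helpers defined above; the `Profile` model is invented, and the calls are meant to behave the same on Pydantic v1 and v2:

from typing import List

from pydantic import BaseModel

from nonebot.compat import model_dump, model_fields, type_validate_python


class Profile(BaseModel):
    name: str
    tags: List[str] = []


# version-agnostic replacement for parse_obj_as / TypeAdapter.validate_python
profile = type_validate_python(Profile, {"name": "bot", "tags": ["demo"]})

# version-agnostic field introspection via the ModelField wrapper
for field in model_fields(Profile):
    print(field.name, field.annotation)

# plain dict output, regardless of pydantic major
print(model_dump(profile))
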
|
@@ -12,76 +12,256 @@ FrontMatter:
|
||||
"""
|
||||
|
||||
import os
|
||||
import abc
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import timedelta
|
||||
from ipaddress import IPv4Address
|
||||
from typing import TYPE_CHECKING, Any, Set, Dict, Tuple, Union, Mapping, Optional
|
||||
|
||||
from pydantic.utils import deep_update
|
||||
from pydantic.fields import Undefined, UndefinedType
|
||||
from pydantic import Extra, Field, BaseSettings, IPvAnyAddress
|
||||
from pydantic.env_settings import (
|
||||
DotenvType,
|
||||
SettingsError,
|
||||
EnvSettingsSource,
|
||||
InitSettingsSource,
|
||||
SettingsSourceCallable,
|
||||
from typing_extensions import TypeAlias, get_args, get_origin
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Set,
|
||||
Dict,
|
||||
List,
|
||||
Type,
|
||||
Tuple,
|
||||
Union,
|
||||
Mapping,
|
||||
Optional,
|
||||
)
|
||||
|
||||
from dotenv import dotenv_values
|
||||
from pydantic import Field, BaseModel
|
||||
from pydantic.networks import IPvAnyAddress
|
||||
|
||||
from nonebot.log import logger
|
||||
from nonebot.typing import origin_is_union
|
||||
from nonebot.utils import deep_update, type_is_complex, lenient_issubclass
|
||||
from nonebot.compat import (
|
||||
PYDANTIC_V2,
|
||||
ConfigDict,
|
||||
ModelField,
|
||||
PydanticUndefined,
|
||||
PydanticUndefinedType,
|
||||
model_config,
|
||||
model_fields,
|
||||
)
|
||||
|
||||
DOTENV_TYPE: TypeAlias = Union[
|
||||
Path, str, List[Union[Path, str]], Tuple[Union[Path, str], ...]
|
||||
]
|
||||
|
||||
ENV_FILE_SENTINEL = Path("")
|
||||
|
||||
|
||||
class CustomEnvSettings(EnvSettingsSource):
|
||||
def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
|
||||
class SettingsError(ValueError): ...
|
||||
|
||||
|
||||
class BaseSettingsSource(abc.ABC):
|
||||
def __init__(self, settings_cls: Type["BaseSettings"]) -> None:
|
||||
self.settings_cls = settings_cls
|
||||
|
||||
@property
|
||||
def config(self) -> "SettingsConfig":
|
||||
return model_config(self.settings_cls)
|
||||
|
||||
@abc.abstractmethod
|
||||
def __call__(self) -> Dict[str, Any]:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class InitSettingsSource(BaseSettingsSource):
|
||||
__slots__ = ("init_kwargs",)
|
||||
|
||||
def __init__(
|
||||
self, settings_cls: Type["BaseSettings"], init_kwargs: Dict[str, Any]
|
||||
) -> None:
|
||||
self.init_kwargs = init_kwargs
|
||||
super().__init__(settings_cls)
|
||||
|
||||
def __call__(self) -> Dict[str, Any]:
|
||||
return self.init_kwargs
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"InitSettingsSource(init_kwargs={self.init_kwargs!r})"
|
||||
|
||||
|
||||
class DotEnvSettingsSource(BaseSettingsSource):
|
||||
def __init__(
|
||||
self,
|
||||
settings_cls: Type["BaseSettings"],
|
||||
env_file: Optional[DOTENV_TYPE] = ENV_FILE_SENTINEL,
|
||||
env_file_encoding: Optional[str] = None,
|
||||
case_sensitive: Optional[bool] = None,
|
||||
env_nested_delimiter: Optional[str] = None,
|
||||
) -> None:
|
||||
super().__init__(settings_cls)
|
||||
self.env_file = (
|
||||
env_file
|
||||
if env_file is not ENV_FILE_SENTINEL
|
||||
else self.config.get("env_file", (".env",))
|
||||
)
|
||||
self.env_file_encoding = (
|
||||
env_file_encoding
|
||||
if env_file_encoding is not None
|
||||
else self.config.get("env_file_encoding", "utf-8")
|
||||
)
|
||||
self.case_sensitive = (
|
||||
case_sensitive
|
||||
if case_sensitive is not None
|
||||
else self.config.get("case_sensitive", False)
|
||||
)
|
||||
self.env_nested_delimiter = (
|
||||
env_nested_delimiter
|
||||
if env_nested_delimiter is not None
|
||||
else self.config.get("env_nested_delimiter", None)
|
||||
)
|
||||
|
||||
def _apply_case_sensitive(self, var_name: str) -> str:
|
||||
return var_name if self.case_sensitive else var_name.lower()
|
||||
|
||||
def _field_is_complex(self, field: ModelField) -> Tuple[bool, bool]:
|
||||
if type_is_complex(field.annotation):
|
||||
return True, False
|
||||
elif origin_is_union(get_origin(field.annotation)) and any(
|
||||
type_is_complex(arg) for arg in get_args(field.annotation)
|
||||
):
|
||||
return True, True
|
||||
return False, False
|
||||
|
||||
def _parse_env_vars(
|
||||
self, env_vars: Mapping[str, Optional[str]]
|
||||
) -> Dict[str, Optional[str]]:
|
||||
return {
|
||||
self._apply_case_sensitive(key): value for key, value in env_vars.items()
|
||||
}
|
||||
|
||||
def _read_env_file(self, file_path: Path) -> Dict[str, Optional[str]]:
|
||||
file_vars = dotenv_values(file_path, encoding=self.env_file_encoding)
|
||||
return self._parse_env_vars(file_vars)
|
||||
|
||||
def _read_env_files(self) -> Dict[str, Optional[str]]:
|
||||
env_files = self.env_file
|
||||
if env_files is None:
|
||||
return {}
|
||||
|
||||
if isinstance(env_files, (str, os.PathLike)):
|
||||
env_files = [env_files]
|
||||
|
||||
dotenv_vars: Dict[str, Optional[str]] = {}
|
||||
for env_file in env_files:
|
||||
env_path = Path(env_file).expanduser()
|
||||
if env_path.is_file():
|
||||
dotenv_vars.update(self._read_env_file(env_path))
|
||||
return dotenv_vars
|
||||
|
||||
def _next_field(
|
||||
self, field: Optional[ModelField], key: str
|
||||
) -> Optional[ModelField]:
|
||||
if not field or origin_is_union(get_origin(field.annotation)):
|
||||
return None
|
||||
elif field.annotation and lenient_issubclass(field.annotation, BaseModel):
|
||||
for field in model_fields(field.annotation):
|
||||
if field.name == key:
|
||||
return field
|
||||
return None
|
||||
|
||||
def _explode_env_vars(
|
||||
self,
|
||||
field: ModelField,
|
||||
env_vars: Dict[str, Optional[str]],
|
||||
env_file_vars: Dict[str, Optional[str]],
|
||||
) -> Dict[str, Any]:
|
||||
if self.env_nested_delimiter is None:
|
||||
return {}
|
||||
|
||||
prefix = f"{field.name}{self.env_nested_delimiter}"
|
||||
result: Dict[str, Any] = {}
|
||||
for env_name, env_val in env_vars.items():
|
||||
if not env_name.startswith(prefix):
|
||||
continue
|
||||
|
||||
# delete from file vars when used
|
||||
if env_name in env_file_vars:
|
||||
del env_file_vars[env_name]
|
||||
|
||||
_, *keys, last_key = env_name.split(self.env_nested_delimiter)
|
||||
env_var = result
|
||||
target_field: Optional[ModelField] = field
|
||||
for key in keys:
|
||||
target_field = self._next_field(target_field, key)
|
||||
env_var = env_var.setdefault(key, {})
|
||||
|
||||
target_field = self._next_field(target_field, last_key)
|
||||
if target_field and env_val:
|
||||
is_complex, allow_parse_failure = self._field_is_complex(target_field)
|
||||
if is_complex:
|
||||
try:
|
||||
env_val = json.loads(env_val)
|
||||
except ValueError as e:
|
||||
if not allow_parse_failure:
|
||||
raise SettingsError(
|
||||
f'error parsing env var "{env_name}"'
|
||||
) from e
|
||||
|
||||
env_var[last_key] = env_val
|
||||
|
||||
return result
|
||||
|
||||
def __call__(self) -> Dict[str, Any]:
|
||||
"""从环境变量和 dotenv 配置文件中读取配置项。"""
|
||||
|
||||
d: Dict[str, Any] = {}
|
||||
|
||||
if settings.__config__.case_sensitive:
|
||||
env_vars: Mapping[str, Optional[str]] = os.environ # pragma: no cover
|
||||
else:
|
||||
env_vars = {k.lower(): v for k, v in os.environ.items()}
|
||||
|
||||
env_file_vars = self._read_env_files(settings.__config__.case_sensitive)
|
||||
env_vars = self._parse_env_vars(os.environ)
|
||||
env_file_vars = self._read_env_files()
|
||||
env_vars = {**env_file_vars, **env_vars}
|
||||
|
||||
for field in settings.__fields__.values():
|
||||
env_val: Union[str, None, UndefinedType] = Undefined
|
||||
for env_name in field.field_info.extra["env_names"]:
|
||||
env_val = env_vars.get(env_name, Undefined)
|
||||
if env_name in env_file_vars:
|
||||
del env_file_vars[env_name]
|
||||
if env_val is not Undefined:
|
||||
break
|
||||
for field in model_fields(self.settings_cls):
|
||||
field_name = field.name
|
||||
env_name = self._apply_case_sensitive(field_name)
|
||||
|
||||
is_complex, allow_parse_failure = self.field_is_complex(field)
|
||||
# try get values from env vars
|
||||
env_val = env_vars.get(env_name, PydanticUndefined)
|
||||
# delete from file vars when used
|
||||
if env_name in env_file_vars:
|
||||
del env_file_vars[env_name]
|
||||
|
||||
is_complex, allow_parse_failure = self._field_is_complex(field)
|
||||
if is_complex:
|
||||
if isinstance(env_val, UndefinedType):
|
||||
if isinstance(env_val, PydanticUndefinedType):
|
||||
# field is complex but no value found so far, try explode_env_vars
|
||||
if env_val_built := self.explode_env_vars(field, env_vars):
|
||||
d[field.alias] = env_val_built
|
||||
if env_val_built := self._explode_env_vars(
|
||||
field, env_vars, env_file_vars
|
||||
):
|
||||
d[field_name] = env_val_built
|
||||
elif env_val is None:
|
||||
d[field.alias] = env_val
|
||||
d[field_name] = env_val
|
||||
else:
|
||||
# field is complex and there's a value
|
||||
# decode that as JSON, then add explode_env_vars
|
||||
try:
|
||||
env_val = settings.__config__.parse_env_var(field.name, env_val)
|
||||
env_val = json.loads(env_val)
|
||||
except ValueError as e:
|
||||
if not allow_parse_failure:
|
||||
raise SettingsError(
|
||||
f'error parsing env var "{env_name}"' # type: ignore
|
||||
f'error parsing env var "{env_name}"'
|
||||
) from e
|
||||
|
||||
if isinstance(env_val, dict):
|
||||
d[field.alias] = deep_update(
|
||||
env_val, self.explode_env_vars(field, env_vars)
|
||||
# field value is a dict
|
||||
# try explode_env_vars to find more sub-values
|
||||
d[field_name] = deep_update(
|
||||
env_val,
|
||||
self._explode_env_vars(field, env_vars, env_file_vars),
|
||||
)
|
||||
else:
|
||||
d[field.alias] = env_val
|
||||
elif not isinstance(env_val, UndefinedType):
|
||||
d[field_name] = env_val
|
||||
elif env_val is not PydanticUndefined:
|
||||
# simplest case, field is not complex
|
||||
# we only need to add the value if it was found
|
||||
d[field.alias] = env_val
|
||||
d[field_name] = env_val
|
||||
|
||||
# remain user custom config
|
||||
for env_name in env_file_vars:
|
||||
@@ -89,7 +269,7 @@ class CustomEnvSettings(EnvSettingsSource):
|
||||
if env_val and (val_striped := env_val.strip()):
|
||||
# there's a value, decode that as JSON
|
||||
try:
|
||||
env_val = settings.__config__.parse_env_var(env_name, val_striped)
|
||||
env_val = json.loads(val_striped)
|
||||
except ValueError:
|
||||
logger.trace(
|
||||
"Error while parsing JSON for "
|
||||
@@ -113,38 +293,80 @@ class CustomEnvSettings(EnvSettingsSource):
|
||||
return d
|
||||
|
||||
|
||||
class BaseConfig(BaseSettings):
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
|
||||
class SettingsConfig(ConfigDict, total=False):
|
||||
env_file: Optional[DOTENV_TYPE]
|
||||
env_file_encoding: str
|
||||
case_sensitive: bool
|
||||
env_nested_delimiter: Optional[str]
|
||||
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
class SettingsConfig(ConfigDict):
|
||||
env_file: Optional[DOTENV_TYPE]
|
||||
env_file_encoding: str
|
||||
case_sensitive: bool
|
||||
env_nested_delimiter: Optional[str]
|
||||
|
||||
|
||||
class BaseSettings(BaseModel):
|
||||
if TYPE_CHECKING:
|
||||
# dummy getattr for pylance checking, actually not used
|
||||
def __getattr__(self, name: str) -> Any: # pragma: no cover
|
||||
return self.__dict__.get(name)
|
||||
|
||||
class Config:
|
||||
extra = Extra.allow
|
||||
env_nested_delimiter = "__"
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
model_config: SettingsConfig = SettingsConfig(
|
||||
extra="allow",
|
||||
env_file=".env",
|
||||
env_file_encoding="utf-8",
|
||||
case_sensitive=False,
|
||||
env_nested_delimiter="__",
|
||||
)
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
@classmethod
|
||||
def customise_sources(
|
||||
cls,
|
||||
init_settings: InitSettingsSource,
|
||||
env_settings: EnvSettingsSource,
|
||||
file_secret_settings: SettingsSourceCallable,
|
||||
) -> Tuple[SettingsSourceCallable, ...]:
|
||||
common_config = init_settings.init_kwargs.pop("_common_config", {})
|
||||
return (
|
||||
init_settings,
|
||||
CustomEnvSettings(
|
||||
env_settings.env_file,
|
||||
env_settings.env_file_encoding,
|
||||
env_settings.env_nested_delimiter,
|
||||
env_settings.env_prefix_len,
|
||||
),
|
||||
InitSettingsSource(common_config),
|
||||
file_secret_settings,
|
||||
class Config(SettingsConfig):
|
||||
extra = "allow" # type: ignore
|
||||
env_file = ".env"
|
||||
env_file_encoding = "utf-8"
|
||||
case_sensitive = False
|
||||
env_nested_delimiter = "__"
|
||||
|
||||
def __init__(
|
||||
__settings_self__, # pyright: ignore[reportSelfClsParameterName]
|
||||
_env_file: Optional[DOTENV_TYPE] = ENV_FILE_SENTINEL,
|
||||
_env_file_encoding: Optional[str] = None,
|
||||
_env_nested_delimiter: Optional[str] = None,
|
||||
**values: Any,
|
||||
) -> None:
|
||||
super().__init__(
|
||||
**__settings_self__._settings_build_values(
|
||||
values,
|
||||
env_file=_env_file,
|
||||
env_file_encoding=_env_file_encoding,
|
||||
env_nested_delimiter=_env_nested_delimiter,
|
||||
)
|
||||
)
|
||||
|
||||
def _settings_build_values(
|
||||
self,
|
||||
init_kwargs: Dict[str, Any],
|
||||
env_file: Optional[DOTENV_TYPE] = None,
|
||||
env_file_encoding: Optional[str] = None,
|
||||
env_nested_delimiter: Optional[str] = None,
|
||||
) -> Dict[str, Any]:
|
||||
init_settings = InitSettingsSource(self.__class__, init_kwargs=init_kwargs)
|
||||
env_settings = DotEnvSettingsSource(
|
||||
self.__class__,
|
||||
env_file=env_file,
|
||||
env_file_encoding=env_file_encoding,
|
||||
env_nested_delimiter=env_nested_delimiter,
|
||||
)
|
||||
return deep_update(env_settings(), init_settings())
|
||||
|
||||
|
||||
class Env(BaseConfig):
|
||||
class Env(BaseSettings):
|
||||
"""运行环境配置。大小写不敏感。
|
||||
|
||||
将会从 **环境变量** > **dotenv 配置文件** 的优先级读取环境信息。
|
||||
@@ -156,11 +378,8 @@ class Env(BaseConfig):
|
||||
NoneBot 将从 `.env.{environment}` 文件中加载配置。
|
||||
"""
|
||||
|
||||
class Config:
|
||||
env_file = ".env"
|
||||
|
||||
|
||||
class Config(BaseConfig):
|
||||
class Config(BaseSettings):
|
||||
"""NoneBot 主要配置。大小写不敏感。
|
||||
|
||||
除了 NoneBot 的配置项外,还可以自行添加配置项到 `.env.{environment}` 文件中。
|
||||
@@ -169,7 +388,8 @@ class Config(BaseConfig):
|
||||
配置方法参考: [配置](https://nonebot.dev/docs/appendices/config)
|
||||
"""
|
||||
|
||||
_env_file: DotenvType = ".env", ".env.prod"
|
||||
if TYPE_CHECKING:
|
||||
_env_file: Optional[DOTENV_TYPE] = ".env", ".env.prod"
|
||||
|
||||
# nonebot configs
|
||||
driver: str = "~fastapi"
|
||||
@@ -254,11 +474,19 @@ class Config(BaseConfig):
|
||||
# custom configs can be assigned during nonebot.init
|
||||
# or from env file using json loads
|
||||
|
||||
class Config:
|
||||
env_file = ".env", ".env.prod"
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
model_config = SettingsConfig(env_file=(".env", ".env.prod"))
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
class Config(SettingsConfig):
|
||||
env_file = ".env", ".env.prod"
|
||||
|
||||
|
||||
__autodoc__ = {
|
||||
"CustomEnvSettings": False,
|
||||
"BaseConfig": False,
|
||||
"SettingsError": False,
|
||||
"BaseSettingsSource": False,
|
||||
"InitSettingsSource": False,
|
||||
"DotEnvSettingsSource": False,
|
||||
"SettingsConfig": False,
|
||||
"BaseSettings": False,
|
||||
}
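
A rough illustration of how the rewritten settings sources are meant to behave: environment variables take precedence over the dotenv files, and complex fields are decoded with json.loads. The `driver` and `superusers` fields exist in `Config`; the concrete values below are illustrative only.

import os

from nonebot.config import Config

os.environ["DRIVER"] = "~httpx"      # simple field: taken as-is (case-insensitive lookup)
os.environ["SUPERUSERS"] = '["42"]'  # complex field: JSON-decoded before validation

config = Config(_env_file=None)      # skip dotenv files, read os.environ only
print(config.driver)                 # -> "~httpx"
print(config.superusers)             # -> {"42"}
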
|
||||
|
@@ -24,14 +24,11 @@ from typing import (
|
||||
cast,
|
||||
)
|
||||
|
||||
from pydantic import BaseConfig
|
||||
from pydantic.schema import get_annotation_from_field_info
|
||||
from pydantic.fields import Required, FieldInfo, Undefined, ModelField
|
||||
|
||||
from nonebot.log import logger
|
||||
from nonebot.typing import _DependentCallable
|
||||
from nonebot.exception import SkippedException
|
||||
from nonebot.utils import run_sync, is_coroutine_callable
|
||||
from nonebot.compat import FieldInfo, ModelField, PydanticUndefined
|
||||
|
||||
from .utils import check_field_type, get_typed_signature
|
||||
|
||||
@@ -69,10 +66,6 @@ class Param(abc.ABC, FieldInfo):
|
||||
return
|
||||
|
||||
|
||||
class CustomConfig(BaseConfig):
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class Dependent(Generic[R]):
|
||||
"""依赖注入容器
|
||||
@@ -125,12 +118,8 @@ class Dependent(Generic[R]):
|
||||
params = get_typed_signature(call).parameters.values()
|
||||
|
||||
for param in params:
|
||||
default_value = Required
|
||||
if param.default != param.empty:
|
||||
default_value = param.default
|
||||
|
||||
if isinstance(default_value, Param):
|
||||
field_info = default_value
|
||||
if isinstance(param.default, Param):
|
||||
field_info = param.default
|
||||
else:
|
||||
for allow_type in allow_types:
|
||||
if field_info := allow_type._check_param(param, allow_types):
|
||||
@@ -141,25 +130,13 @@ class Dependent(Generic[R]):
|
||||
f"for function {call} with type {param.annotation}"
|
||||
)
|
||||
|
||||
default_value = field_info.default
|
||||
|
||||
annotation: Any = Any
|
||||
required = default_value == Required
|
||||
if param.annotation != param.empty:
|
||||
if param.annotation is not param.empty:
|
||||
annotation = param.annotation
|
||||
annotation = get_annotation_from_field_info(
|
||||
annotation, field_info, param.name
|
||||
)
|
||||
|
||||
fields.append(
|
||||
ModelField(
|
||||
name=param.name,
|
||||
type_=annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None if required else default_value,
|
||||
required=required,
|
||||
field_info=field_info,
|
||||
ModelField.construct(
|
||||
name=param.name, annotation=annotation, field_info=field_info
|
||||
)
|
||||
)
|
||||
|
||||
@@ -207,7 +184,7 @@ class Dependent(Generic[R]):
|
||||
async def _solve_field(self, field: ModelField, params: Dict[str, Any]) -> Any:
|
||||
param = cast(Param, field.field_info)
|
||||
value = await param._solve(**params)
|
||||
if value is Undefined:
|
||||
if value is PydanticUndefined:
|
||||
value = field.get_default()
|
||||
v = check_field_type(field, value)
|
||||
return v if param.validate else value
|
||||
|
@@ -8,10 +8,10 @@ import inspect
from typing import Any, Dict, Callable, ForwardRef

from loguru import logger
from pydantic.fields import ModelField
from pydantic.typing import evaluate_forwardref

from nonebot.exception import TypeMisMatch
from nonebot.typing import evaluate_forwardref
from nonebot.compat import DEFAULT_CONFIG, ModelField, model_field_validate


def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature:
@@ -50,7 +50,7 @@ def get_typed_annotation(param: inspect.Parameter, globalns: Dict[str, Any]) ->
def check_field_type(field: ModelField, value: Any) -> Any:
"""检查字段类型是否匹配"""

v, errs_ = field.validate(value, {}, loc=())
if errs_:
try:
return model_field_validate(field, value, DEFAULT_CONFIG)
except ValueError:
raise TypeMisMatch(field, value)
return v
@@ -179,8 +179,7 @@ class WebSocket(BaseWebSocket):

if TYPE_CHECKING:

class Driver(Mixin, NoneDriver):
...
class Driver(Mixin, NoneDriver): ...

else:
Driver = combine_driver(NoneDriver, Mixin)
@@ -15,14 +15,13 @@ FrontMatter:
|
||||
description: nonebot.drivers.fastapi 模块
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
import contextlib
|
||||
from functools import wraps
|
||||
from typing_extensions import override
|
||||
from typing import Any, Dict, List, Tuple, Union, Optional
|
||||
|
||||
from pydantic import BaseSettings
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.config import Env
|
||||
from nonebot.drivers import ASGIMixin
|
||||
@@ -34,8 +33,6 @@ from nonebot.drivers import Request as BaseRequest
|
||||
from nonebot.drivers import WebSocket as BaseWebSocket
|
||||
from nonebot.drivers import HTTPServerSetup, WebSocketServerSetup
|
||||
|
||||
from ._lifespan import LIFESPAN_FUNC, Lifespan
|
||||
|
||||
try:
|
||||
import uvicorn
|
||||
from fastapi.responses import Response
|
||||
@@ -61,7 +58,7 @@ def catch_closed(func):
|
||||
return decorator
|
||||
|
||||
|
||||
class Config(BaseSettings):
|
||||
class Config(BaseModel):
|
||||
"""FastAPI 驱动框架设置,详情参考 FastAPI 文档"""
|
||||
|
||||
fastapi_openapi_url: Optional[str] = None
|
||||
@@ -85,9 +82,6 @@ class Config(BaseSettings):
|
||||
fastapi_extra: Dict[str, Any] = {}
|
||||
"""传递给 `FastAPI` 的其他参数。"""
|
||||
|
||||
class Config:
|
||||
extra = "ignore"
|
||||
|
||||
|
||||
class Driver(BaseDriver, ASGIMixin):
|
||||
"""FastAPI 驱动框架。"""
|
||||
@@ -97,8 +91,6 @@ class Driver(BaseDriver, ASGIMixin):
|
||||
|
||||
self.fastapi_config: Config = Config(**config.dict())
|
||||
|
||||
self._lifespan = Lifespan()
|
||||
|
||||
self._server_app = FastAPI(
|
||||
lifespan=self._lifespan_manager,
|
||||
openapi_url=self.fastapi_config.fastapi_openapi_url,
|
||||
@@ -155,14 +147,6 @@ class Driver(BaseDriver, ASGIMixin):
|
||||
name=setup.name,
|
||||
)
|
||||
|
||||
@override
|
||||
def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
return self._lifespan.on_startup(func)
|
||||
|
||||
@override
|
||||
def on_shutdown(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
return self._lifespan.on_shutdown(func)
|
||||
|
||||
@contextlib.asynccontextmanager
|
||||
async def _lifespan_manager(self, app: FastAPI):
|
||||
await self._lifespan.startup()
|
||||
|
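
Driver options stay plain config values after the `BaseSettings` -> `BaseModel` change shown above; a hedged example of passing them through `nonebot.init` (the `title` value is just any FastAPI constructor argument, chosen for illustration):

import nonebot

nonebot.init(fastapi_extra={"title": "My Bot API"})  # forwarded as FastAPI(**fastapi_extra)
app = nonebot.get_asgi()                             # the FastAPI ASGI app built by the driver
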
@@ -72,8 +72,7 @@ class Mixin(HTTPClientMixin):

if TYPE_CHECKING:

class Driver(Mixin, NoneDriver):
...
class Driver(Mixin, NoneDriver): ...

else:
Driver = combine_driver(NoneDriver, Mixin)
@@ -19,8 +19,6 @@ from nonebot.consts import WINDOWS
|
||||
from nonebot.config import Env, Config
|
||||
from nonebot.drivers import Driver as BaseDriver
|
||||
|
||||
from ._lifespan import LIFESPAN_FUNC, Lifespan
|
||||
|
||||
HANDLED_SIGNALS = (
|
||||
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
|
||||
signal.SIGTERM, # Unix signal 15. Sent by `kill <pid>`.
|
||||
@@ -35,8 +33,6 @@ class Driver(BaseDriver):
|
||||
def __init__(self, env: Env, config: Config):
|
||||
super().__init__(env, config)
|
||||
|
||||
self._lifespan = Lifespan()
|
||||
|
||||
self.should_exit: asyncio.Event = asyncio.Event()
|
||||
self.force_exit: bool = False
|
||||
|
||||
@@ -52,16 +48,6 @@ class Driver(BaseDriver):
|
||||
"""none driver 使用的 logger"""
|
||||
return logger
|
||||
|
||||
@override
|
||||
def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
"""注册一个启动时执行的函数"""
|
||||
return self._lifespan.on_startup(func)
|
||||
|
||||
@override
|
||||
def on_shutdown(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
"""注册一个停止时执行的函数"""
|
||||
return self._lifespan.on_shutdown(func)
|
||||
|
||||
@override
|
||||
def run(self, *args, **kwargs):
|
||||
"""启动 none driver"""
|
||||
|
@@ -18,20 +18,9 @@ FrontMatter:
|
||||
import asyncio
|
||||
from functools import wraps
|
||||
from typing_extensions import override
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Tuple,
|
||||
Union,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Coroutine,
|
||||
cast,
|
||||
)
|
||||
from typing import Any, Dict, List, Tuple, Union, Optional, cast
|
||||
|
||||
from pydantic import BaseSettings
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.config import Env
|
||||
from nonebot.drivers import ASGIMixin
|
||||
@@ -57,8 +46,6 @@ except ModuleNotFoundError as e: # pragma: no cover
|
||||
"Install with pip: `pip install nonebot2[quart]`"
|
||||
) from e
|
||||
|
||||
_AsyncCallable = TypeVar("_AsyncCallable", bound=Callable[..., Coroutine])
|
||||
|
||||
|
||||
def catch_closed(func):
|
||||
@wraps(func)
|
||||
@@ -71,7 +58,7 @@ def catch_closed(func):
|
||||
return decorator
|
||||
|
||||
|
||||
class Config(BaseSettings):
|
||||
class Config(BaseModel):
|
||||
"""Quart 驱动框架设置"""
|
||||
|
||||
quart_reload: bool = False
|
||||
@@ -87,9 +74,6 @@ class Config(BaseSettings):
|
||||
quart_extra: Dict[str, Any] = {}
|
||||
"""传递给 `Quart` 的其他参数。"""
|
||||
|
||||
class Config:
|
||||
extra = "ignore"
|
||||
|
||||
|
||||
class Driver(BaseDriver, ASGIMixin):
|
||||
"""Quart 驱动框架"""
|
||||
@@ -102,6 +86,8 @@ class Driver(BaseDriver, ASGIMixin):
|
||||
self._server_app = Quart(
|
||||
self.__class__.__qualname__, **self.quart_config.quart_extra
|
||||
)
|
||||
self._server_app.before_serving(self._lifespan.startup)
|
||||
self._server_app.after_serving(self._lifespan.shutdown)
|
||||
|
||||
@property
|
||||
@override
|
||||
@@ -150,16 +136,6 @@ class Driver(BaseDriver, ASGIMixin):
|
||||
view_func=_handle,
|
||||
)
|
||||
|
||||
@override
|
||||
def on_startup(self, func: _AsyncCallable) -> _AsyncCallable:
|
||||
"""参考文档: [`Startup and Shutdown`](https://pgjones.gitlab.io/quart/how_to_guides/startup_shutdown.html)"""
|
||||
return self.server_app.before_serving(func) # type: ignore
|
||||
|
||||
@override
|
||||
def on_shutdown(self, func: _AsyncCallable) -> _AsyncCallable:
|
||||
"""参考文档: [`Startup and Shutdown`](https://pgjones.gitlab.io/quart/how_to_guides/startup_shutdown.html)"""
|
||||
return self.server_app.after_serving(func) # type: ignore
|
||||
|
||||
@override
|
||||
def run(
|
||||
self,
|
||||
|
@@ -50,10 +50,7 @@ def catch_closed(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]]:
try:
return await func(*args, **kwargs)
except ConnectionClosed as e:
if e.rcvd_then_sent:
raise WebSocketClosed(e.rcvd.code, e.rcvd.reason) # type: ignore
else:
raise WebSocketClosed(e.sent.code, e.sent.reason) # type: ignore
raise WebSocketClosed(e.code, e.reason)

return decorator

@@ -131,8 +128,7 @@ class WebSocket(BaseWebSocket):

if TYPE_CHECKING:

class Driver(Mixin, NoneDriver):
...
class Driver(Mixin, NoneDriver): ...

else:
Driver = combine_driver(NoneDriver, Mixin)
@@ -31,7 +31,7 @@ FrontMatter:

from typing import Any, Optional

from pydantic.fields import ModelField
from nonebot.compat import ModelField


class NoneBotException(Exception):
@@ -3,6 +3,7 @@ from contextlib import asynccontextmanager
from typing import Any, Dict, AsyncGenerator

from nonebot.config import Config
from nonebot.internal.driver._lifespan import LIFESPAN_FUNC
from nonebot.internal.driver import (
Driver,
Request,
@@ -97,6 +98,9 @@ class Adapter(abc.ABC):
async with self.driver.websocket(setup) as ws:
yield ws

def on_ready(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
return self.driver._lifespan.on_ready(func)

@abc.abstractmethod
async def _call_api(self, bot: Bot, api: str, **data: Any) -> Any:
"""`Adapter` 实际调用 api 的逻辑实现函数,实现该方法以调用 api。
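
A sketch of how an adapter could use the new `on_ready` hook; the subclass, its name, and the `_connect` coroutine are illustrative, not part of this change:

from typing import Any

from nonebot.adapters import Adapter


class EchoAdapter(Adapter):  # illustrative only
    def __init__(self, driver, **kwargs: Any):
        super().__init__(driver, **kwargs)
        self.on_ready(self._connect)  # runs once the driver lifespan is ready

    @classmethod
    def get_name(cls) -> str:
        return "echo"

    async def _call_api(self, bot, api: str, **data: Any) -> Any:
        raise NotImplementedError

    async def _connect(self) -> None:
        ...  # e.g. open forward connections here
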
@@ -14,8 +14,7 @@ if TYPE_CHECKING:
from .message import Message, MessageSegment

class _ApiCall(Protocol):
async def __call__(self, **kwargs: Any) -> Any:
...
async def __call__(self, **kwargs: Any) -> Any: ...


class Bot(abc.ABC):
@@ -106,7 +105,10 @@ class Bot(abc.ABC):
logger.debug("Running CalledAPI hooks...")
await asyncio.gather(*coros)
except MockApiException as e:
# mock api result
result = e.result
# ignore exception
exception = None
logger.debug(
f"Calling API {api} result is mocked. Return {result} instead."
)
@@ -4,6 +4,7 @@ from typing import Any, Type, TypeVar
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.utils import DataclassEncoder
|
||||
from nonebot.compat import PYDANTIC_V2, ConfigDict
|
||||
|
||||
from .message import Message
|
||||
|
||||
@@ -13,15 +14,21 @@ E = TypeVar("E", bound="Event")
|
||||
class Event(abc.ABC, BaseModel):
|
||||
"""Event 基类。提供获取关键信息的方法,其余信息可直接获取。"""
|
||||
|
||||
class Config:
|
||||
extra = "allow"
|
||||
json_encoders = {Message: DataclassEncoder}
|
||||
if PYDANTIC_V2: # pragma: pydantic-v2
|
||||
model_config = ConfigDict(extra="allow")
|
||||
else: # pragma: pydantic-v1
|
||||
|
||||
@classmethod
|
||||
def validate(cls: Type["E"], value: Any) -> "E":
|
||||
if isinstance(value, Event) and not isinstance(value, cls):
|
||||
raise TypeError(f"{value} is incompatible with Event type {cls}")
|
||||
return super().validate(value)
|
||||
class Config(ConfigDict):
|
||||
extra = "allow" # type: ignore
|
||||
json_encoders = {Message: DataclassEncoder}
|
||||
|
||||
if not PYDANTIC_V2: # pragma: pydantic-v1
|
||||
|
||||
@classmethod
|
||||
def validate(cls: Type["E"], value: Any) -> "E":
|
||||
if isinstance(value, Event) and not isinstance(value, cls):
|
||||
raise TypeError(f"{value} is incompatible with Event type {cls}")
|
||||
return super().validate(value)
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_type(self) -> str:
|
||||
|
@@ -17,7 +17,7 @@ from typing import (
|
||||
overload,
|
||||
)
|
||||
|
||||
from pydantic import parse_obj_as
|
||||
from nonebot.compat import custom_validation, type_validate_python
|
||||
|
||||
from .template import MessageTemplate
|
||||
|
||||
@@ -25,6 +25,7 @@ TMS = TypeVar("TMS", bound="MessageSegment")
|
||||
TM = TypeVar("TM", bound="Message")
|
||||
|
||||
|
||||
@custom_validation
|
||||
@dataclass
|
||||
class MessageSegment(abc.ABC, Generic[TM]):
|
||||
"""消息段基类"""
|
||||
@@ -65,6 +66,8 @@ class MessageSegment(abc.ABC, Generic[TM]):
|
||||
def _validate(cls, value) -> Self:
|
||||
if isinstance(value, cls):
|
||||
return value
|
||||
if isinstance(value, MessageSegment):
|
||||
raise ValueError(f"Type {type(value)} can not be converted to {cls}")
|
||||
if not isinstance(value, dict):
|
||||
raise ValueError(f"Expected dict for MessageSegment, got {type(value)}")
|
||||
if "type" not in value:
|
||||
@@ -97,6 +100,7 @@ class MessageSegment(abc.ABC, Generic[TM]):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
@custom_validation
|
||||
class Message(List[TMS], abc.ABC):
|
||||
"""消息序列
|
||||
|
||||
@@ -158,9 +162,9 @@ class Message(List[TMS], abc.ABC):
|
||||
elif isinstance(value, str):
|
||||
pass
|
||||
elif isinstance(value, dict):
|
||||
value = parse_obj_as(cls.get_segment_class(), value)
|
||||
value = type_validate_python(cls.get_segment_class(), value)
|
||||
elif isinstance(value, Iterable):
|
||||
value = [parse_obj_as(cls.get_segment_class(), v) for v in value]
|
||||
value = [type_validate_python(cls.get_segment_class(), v) for v in value]
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Expected str, dict or iterable for Message, got {type(value)}"
|
||||
|
@@ -20,9 +20,16 @@ from typing import (
|
||||
overload,
|
||||
)
|
||||
|
||||
from _string import formatter_field_name_split # type: ignore
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .message import Message, MessageSegment
|
||||
|
||||
def formatter_field_name_split( # noqa: F811
|
||||
field_name: str,
|
||||
) -> Tuple[str, List[Tuple[bool, str]]]: ...
|
||||
|
||||
|
||||
TM = TypeVar("TM", bound="Message")
|
||||
TF = TypeVar("TF", str, "Message")
|
||||
|
||||
@@ -36,26 +43,35 @@ class MessageTemplate(Formatter, Generic[TF]):
|
||||
参数:
|
||||
template: 模板
|
||||
factory: 消息类型工厂,默认为 `str`
|
||||
private_getattr: 是否允许在模板中访问私有属性,默认为 `False`
|
||||
"""
|
||||
|
||||
@overload
|
||||
def __init__(
|
||||
self: "MessageTemplate[str]", template: str, factory: Type[str] = str
|
||||
) -> None:
|
||||
...
|
||||
self: "MessageTemplate[str]",
|
||||
template: str,
|
||||
factory: Type[str] = str,
|
||||
private_getattr: bool = False,
|
||||
) -> None: ...
|
||||
|
||||
@overload
|
||||
def __init__(
|
||||
self: "MessageTemplate[TM]", template: Union[str, TM], factory: Type[TM]
|
||||
) -> None:
|
||||
...
|
||||
self: "MessageTemplate[TM]",
|
||||
template: Union[str, TM],
|
||||
factory: Type[TM],
|
||||
private_getattr: bool = False,
|
||||
) -> None: ...
|
||||
|
||||
def __init__(
|
||||
self, template: Union[str, TM], factory: Union[Type[str], Type[TM]] = str
|
||||
self,
|
||||
template: Union[str, TM],
|
||||
factory: Union[Type[str], Type[TM]] = str,
|
||||
private_getattr: bool = False,
|
||||
) -> None:
|
||||
self.template: TF = template # type: ignore
|
||||
self.factory: Type[TF] = factory # type: ignore
|
||||
self.format_specs: Dict[str, FormatSpecFunc] = {}
|
||||
self.private_getattr = private_getattr
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"MessageTemplate({self.template!r}, factory={self.factory!r})"
|
||||
@@ -167,6 +183,19 @@ class MessageTemplate(Formatter, Generic[TF]):

        return functools.reduce(self._add, results), auto_arg_index

    def get_field(
        self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]
    ) -> Tuple[Any, Union[int, str]]:
        first, rest = formatter_field_name_split(field_name)
        obj = self.get_value(first, args, kwargs)

        for is_attr, value in rest:
            if not self.private_getattr and value.startswith("_"):
                raise ValueError("Cannot access private attribute")
            obj = getattr(obj, value) if is_attr else obj[value]

        return obj, first

    def format_field(self, value: Any, format_spec: str) -> Any:
        formatter: Optional[FormatSpecFunc] = self.format_specs.get(format_spec)
        if formatter is None and not issubclass(self.factory, str):
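The new `private_getattr` flag (default `False`) makes `get_field` reject leading-underscore attributes, as shown above. A rough illustration of the resulting behavior, assuming the usual re-export of `MessageTemplate` from `nonebot.adapters`:

```python
from dataclasses import dataclass

from nonebot.adapters import MessageTemplate  # assumed import path


@dataclass
class User:
    name: str
    _secret: str = "hunter2"


user = User(name="Alice")

print(MessageTemplate("hello {user.name}").format(user=user))  # -> "hello Alice"

try:
    MessageTemplate("leak {user._secret}").format(user=user)
except ValueError as e:
    print(e)  # private attributes are blocked unless private_getattr=True is passed
```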
@@ -1,30 +1,30 @@
|
||||
from .model import URL as URL
|
||||
from .driver import Mixin as Mixin
|
||||
from .model import RawURL as RawURL
|
||||
from .driver import Driver as Driver
|
||||
from .abstract import Mixin as Mixin
|
||||
from .model import Cookies as Cookies
|
||||
from .model import Request as Request
|
||||
from .abstract import Driver as Driver
|
||||
from .model import FileType as FileType
|
||||
from .model import Response as Response
|
||||
from .model import DataTypes as DataTypes
|
||||
from .model import FileTypes as FileTypes
|
||||
from .model import WebSocket as WebSocket
|
||||
from .driver import ASGIMixin as ASGIMixin
|
||||
from .model import FilesTypes as FilesTypes
|
||||
from .model import QueryTypes as QueryTypes
|
||||
from .abstract import ASGIMixin as ASGIMixin
|
||||
from .model import CookieTypes as CookieTypes
|
||||
from .model import FileContent as FileContent
|
||||
from .model import HTTPVersion as HTTPVersion
|
||||
from .model import HeaderTypes as HeaderTypes
|
||||
from .model import SimpleQuery as SimpleQuery
|
||||
from .model import ContentTypes as ContentTypes
|
||||
from .driver import ForwardMixin as ForwardMixin
|
||||
from .driver import ReverseMixin as ReverseMixin
|
||||
from .model import QueryVariable as QueryVariable
|
||||
from .driver import ForwardDriver as ForwardDriver
|
||||
from .driver import ReverseDriver as ReverseDriver
|
||||
from .driver import combine_driver as combine_driver
|
||||
from .abstract import ForwardMixin as ForwardMixin
|
||||
from .abstract import ReverseMixin as ReverseMixin
|
||||
from .abstract import ForwardDriver as ForwardDriver
|
||||
from .abstract import ReverseDriver as ReverseDriver
|
||||
from .combine import combine_driver as combine_driver
|
||||
from .model import HTTPServerSetup as HTTPServerSetup
|
||||
from .driver import HTTPClientMixin as HTTPClientMixin
|
||||
from .abstract import HTTPClientMixin as HTTPClientMixin
|
||||
from .model import WebSocketServerSetup as WebSocketServerSetup
|
||||
from .driver import WebSocketClientMixin as WebSocketClientMixin
|
||||
from .abstract import WebSocketClientMixin as WebSocketClientMixin
|
||||
|
@@ -11,6 +11,7 @@ LIFESPAN_FUNC: TypeAlias = Union[SYNC_LIFESPAN_FUNC, ASYNC_LIFESPAN_FUNC]
class Lifespan:
    def __init__(self) -> None:
        self._startup_funcs: List[LIFESPAN_FUNC] = []
        self._ready_funcs: List[LIFESPAN_FUNC] = []
        self._shutdown_funcs: List[LIFESPAN_FUNC] = []

    def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
@@ -21,6 +22,10 @@ class Lifespan:
        self._shutdown_funcs.append(func)
        return func

    def on_ready(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
        self._ready_funcs.append(func)
        return func

    @staticmethod
    async def _run_lifespan_func(
        funcs: List[LIFESPAN_FUNC],
@@ -35,6 +40,9 @@ class Lifespan:
        if self._startup_funcs:
            await self._run_lifespan_func(self._startup_funcs)

        if self._ready_funcs:
            await self._run_lifespan_func(self._ready_funcs)

    async def shutdown(self) -> None:
        if self._shutdown_funcs:
            await self._run_lifespan_func(self._shutdown_funcs)
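A rough sketch of the internal `Lifespan` contract after this change, shown only to illustrate the ordering (ready hooks run after all startup hooks); it assumes sync callables are accepted, as the `LIFESPAN_FUNC` alias suggests:

```python
import asyncio

from nonebot.internal.driver._lifespan import Lifespan  # internal module shown above


async def main() -> None:
    lifespan = Lifespan()

    lifespan.on_startup(lambda: print("startup"))
    lifespan.on_ready(lambda: print("ready, runs after every startup hook"))
    lifespan.on_shutdown(lambda: print("shutdown"))

    await lifespan.startup()   # prints "startup" then "ready"
    await lifespan.shutdown()  # prints "shutdown"


asyncio.run(main())
```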
@@ -2,18 +2,7 @@ import abc
|
||||
import asyncio
|
||||
from typing_extensions import TypeAlias
|
||||
from contextlib import AsyncExitStack, asynccontextmanager
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Set,
|
||||
Dict,
|
||||
Type,
|
||||
Union,
|
||||
TypeVar,
|
||||
Callable,
|
||||
AsyncGenerator,
|
||||
overload,
|
||||
)
|
||||
from typing import TYPE_CHECKING, Any, Set, Dict, Type, AsyncGenerator
|
||||
|
||||
from nonebot.log import logger
|
||||
from nonebot.config import Env, Config
|
||||
@@ -27,14 +16,13 @@ from nonebot.typing import (
|
||||
T_BotDisconnectionHook,
|
||||
)
|
||||
|
||||
from ._lifespan import LIFESPAN_FUNC, Lifespan
|
||||
from .model import Request, Response, WebSocket, HTTPServerSetup, WebSocketServerSetup
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from nonebot.internal.adapter import Bot, Adapter
|
||||
|
||||
|
||||
D = TypeVar("D", bound="Driver")
|
||||
|
||||
BOT_HOOK_PARAMS = [DependParam, BotParam, DefaultParam]
|
||||
|
||||
|
||||
@@ -62,6 +50,7 @@ class Driver(abc.ABC):
|
||||
"""全局配置对象"""
|
||||
self._bots: Dict[str, "Bot"] = {}
|
||||
self._bot_tasks: Set[asyncio.Task] = set()
|
||||
self._lifespan = Lifespan()
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
@@ -113,15 +102,13 @@ class Driver(abc.ABC):
|
||||
|
||||
self.on_shutdown(self._cleanup)
|
||||
|
||||
@abc.abstractmethod
|
||||
def on_startup(self, func: Callable) -> Callable:
|
||||
"""注册一个在驱动器启动时执行的函数"""
|
||||
raise NotImplementedError
|
||||
def on_startup(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
"""注册一个启动时执行的函数"""
|
||||
return self._lifespan.on_startup(func)
|
||||
|
||||
@abc.abstractmethod
|
||||
def on_shutdown(self, func: Callable) -> Callable:
|
||||
"""注册一个在驱动器停止时执行的函数"""
|
||||
raise NotImplementedError
|
||||
def on_shutdown(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
|
||||
"""注册一个停止时执行的函数"""
|
||||
return self._lifespan.on_shutdown(func)
|
||||
|
||||
@classmethod
|
||||
def on_bot_connect(cls, func: T_BotConnectionHook) -> T_BotConnectionHook:
|
||||
@@ -295,44 +282,3 @@ ReverseDriver: TypeAlias = ReverseMixin
|
||||
|
||||
**Deprecated**,请使用 {ref}`nonebot.drivers.ReverseMixin` 或其子类代替。
|
||||
"""
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
class CombinedDriver(Driver, Mixin):
|
||||
...
|
||||
|
||||
|
||||
@overload
|
||||
def combine_driver(driver: Type[D]) -> Type[D]:
|
||||
...
|
||||
|
||||
|
||||
@overload
|
||||
def combine_driver(driver: Type[D], *mixins: Type[Mixin]) -> Type["CombinedDriver"]:
|
||||
...
|
||||
|
||||
|
||||
def combine_driver(
|
||||
driver: Type[D], *mixins: Type[Mixin]
|
||||
) -> Union[Type[D], Type["CombinedDriver"]]:
|
||||
"""将一个驱动器和多个混入类合并。"""
|
||||
# check first
|
||||
assert issubclass(driver, Driver), "`driver` must be subclass of Driver"
|
||||
assert all(
|
||||
issubclass(m, Mixin) for m in mixins
|
||||
), "`mixins` must be subclass of Mixin"
|
||||
|
||||
if not mixins:
|
||||
return driver
|
||||
|
||||
def type_(self: "CombinedDriver") -> str:
|
||||
return (
|
||||
driver.type.__get__(self)
|
||||
+ "+"
|
||||
+ "+".join(x.type.__get__(self) for x in mixins)
|
||||
)
|
||||
|
||||
return type(
|
||||
"CombinedDriver", (*mixins, driver), {"type": property(type_)}
|
||||
) # type: ignore
|
nonebot/internal/driver/combine.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from typing import TYPE_CHECKING, Type, Union, TypeVar, overload

from .abstract import Mixin, Driver

D = TypeVar("D", bound="Driver")

if TYPE_CHECKING:

    class CombinedDriver(Driver, Mixin): ...


@overload
def combine_driver(driver: Type[D]) -> Type[D]: ...


@overload
def combine_driver(driver: Type[D], *mixins: Type[Mixin]) -> Type["CombinedDriver"]: ...


def combine_driver(
    driver: Type[D], *mixins: Type[Mixin]
) -> Union[Type[D], Type["CombinedDriver"]]:
    """将一个驱动器和多个混入类合并。"""
    # check first
    if not issubclass(driver, Driver):
        raise TypeError("`driver` must be subclass of Driver")
    if not all(issubclass(m, Mixin) for m in mixins):
        raise TypeError("`mixins` must be subclass of Mixin")

    if not mixins:
        return driver

    def type_(self: "CombinedDriver") -> str:
        return (
            driver.type.__get__(self)  # type: ignore
            + "+"
            + "+".join(x.type.__get__(self) for x in mixins)  # type: ignore
        )

    return type(
        "CombinedDriver", (*mixins, driver), {"type": property(type_)}
    )  # type: ignore
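For reference, a hedged sketch of how `combine_driver` is typically used to assemble a combined driver; the module paths below are assumptions based on the current driver layout:

```python
from nonebot.drivers.none import Driver as NoneDriver  # assumed module path
from nonebot.drivers.httpx import Mixin as HTTPXMixin   # assumed module path
from nonebot.internal.driver import combine_driver

Driver = combine_driver(NoneDriver, HTTPXMixin)
# an instance of Driver would report a combined type such as "none+httpx"
```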
@@ -65,12 +65,10 @@ class MatcherManager(MutableMapping[int, List[Type["Matcher"]]]):
|
||||
return self.provider.items()
|
||||
|
||||
@overload
|
||||
def get(self, key: int) -> Optional[List[Type["Matcher"]]]:
|
||||
...
|
||||
def get(self, key: int) -> Optional[List[Type["Matcher"]]]: ...
|
||||
|
||||
@overload
|
||||
def get(self, key: int, default: T) -> Union[List[Type["Matcher"]], T]:
|
||||
...
|
||||
def get(self, key: int, default: T) -> Union[List[Type["Matcher"]], T]: ...
|
||||
|
||||
def get(
|
||||
self, key: int, default: Optional[T] = None
|
||||
|
@@ -262,16 +262,20 @@ class Matcher(metaclass=MatcherMeta):
|
||||
"type": type_,
|
||||
"rule": rule or Rule(),
|
||||
"permission": permission or Permission(),
|
||||
"handlers": [
|
||||
handler
|
||||
if isinstance(handler, Dependent)
|
||||
else Dependent[Any].parse(
|
||||
call=handler, allow_types=cls.HANDLER_PARAM_TYPES
|
||||
)
|
||||
for handler in handlers
|
||||
]
|
||||
if handlers
|
||||
else [],
|
||||
"handlers": (
|
||||
[
|
||||
(
|
||||
handler
|
||||
if isinstance(handler, Dependent)
|
||||
else Dependent[Any].parse(
|
||||
call=handler, allow_types=cls.HANDLER_PARAM_TYPES
|
||||
)
|
||||
)
|
||||
for handler in handlers
|
||||
]
|
||||
if handlers
|
||||
else []
|
||||
),
|
||||
"temp": temp,
|
||||
"expire_time": (
|
||||
expire_time
|
||||
@@ -313,7 +317,7 @@ class Matcher(metaclass=MatcherMeta):
|
||||
|
||||
matchers[priority].append(NewMatcher)
|
||||
|
||||
return NewMatcher
|
||||
return NewMatcher # type: ignore
|
||||
|
||||
@classmethod
|
||||
def destroy(cls) -> None:
|
||||
@@ -658,12 +662,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
raise SkippedException
|
||||
|
||||
@overload
|
||||
def get_receive(self, id: str) -> Union[Event, None]:
|
||||
...
|
||||
def get_receive(self, id: str) -> Union[Event, None]: ...
|
||||
|
||||
@overload
|
||||
def get_receive(self, id: str, default: T) -> Union[Event, T]:
|
||||
...
|
||||
def get_receive(self, id: str, default: T) -> Union[Event, T]: ...
|
||||
|
||||
def get_receive(
|
||||
self, id: str, default: Optional[T] = None
|
||||
@@ -680,12 +682,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
self.state[LAST_RECEIVE_KEY] = event
|
||||
|
||||
@overload
|
||||
def get_last_receive(self) -> Union[Event, None]:
|
||||
...
|
||||
def get_last_receive(self) -> Union[Event, None]: ...
|
||||
|
||||
@overload
|
||||
def get_last_receive(self, default: T) -> Union[Event, T]:
|
||||
...
|
||||
def get_last_receive(self, default: T) -> Union[Event, T]: ...
|
||||
|
||||
def get_last_receive(
|
||||
self, default: Optional[T] = None
|
||||
@@ -697,12 +697,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
return self.state.get(LAST_RECEIVE_KEY, default)
|
||||
|
||||
@overload
|
||||
def get_arg(self, key: str) -> Union[Message, None]:
|
||||
...
|
||||
def get_arg(self, key: str) -> Union[Message, None]: ...
|
||||
|
||||
@overload
|
||||
def get_arg(self, key: str, default: T) -> Union[Message, T]:
|
||||
...
|
||||
def get_arg(self, key: str, default: T) -> Union[Message, T]: ...
|
||||
|
||||
def get_arg(
|
||||
self, key: str, default: Optional[T] = None
|
||||
@@ -724,12 +722,10 @@ class Matcher(metaclass=MatcherMeta):
|
||||
self.state[REJECT_TARGET] = target
|
||||
|
||||
@overload
|
||||
def get_target(self) -> Union[str, None]:
|
||||
...
|
||||
def get_target(self) -> Union[str, None]: ...
|
||||
|
||||
@overload
|
||||
def get_target(self, default: T) -> Union[str, T]:
|
||||
...
|
||||
def get_target(self, default: T) -> Union[str, T]: ...
|
||||
|
||||
def get_target(self, default: Optional[T] = None) -> Optional[Union[str, T]]:
|
||||
return self.state.get(REJECT_TARGET, default)
|
||||
|
@@ -1,7 +1,7 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
from typing_extensions import Self, Annotated, override
|
||||
from contextlib import AsyncExitStack, contextmanager, asynccontextmanager
|
||||
from typing_extensions import Self, Annotated, get_args, override, get_origin
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
@@ -14,12 +14,12 @@ from typing import (
|
||||
cast,
|
||||
)
|
||||
|
||||
from pydantic.typing import get_args, get_origin
|
||||
from pydantic.fields import Required, FieldInfo, Undefined, ModelField
|
||||
from pydantic.fields import FieldInfo as PydanticFieldInfo
|
||||
|
||||
from nonebot.dependencies import Param, Dependent
|
||||
from nonebot.dependencies.utils import check_field_type
|
||||
from nonebot.dependencies import Param, Dependent, CustomConfig
|
||||
from nonebot.typing import T_State, T_Handler, T_DependencyCache
|
||||
from nonebot.compat import FieldInfo, ModelField, PydanticUndefined, extract_field_info
|
||||
from nonebot.utils import (
|
||||
get_name,
|
||||
run_sync,
|
||||
@@ -34,23 +34,6 @@ if TYPE_CHECKING:
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot.adapters import Bot, Event
|
||||
|
||||
EXTRA_FIELD_INFO = (
|
||||
"gt",
|
||||
"lt",
|
||||
"ge",
|
||||
"le",
|
||||
"multiple_of",
|
||||
"allow_inf_nan",
|
||||
"max_digits",
|
||||
"decimal_places",
|
||||
"min_items",
|
||||
"max_items",
|
||||
"unique_items",
|
||||
"min_length",
|
||||
"max_length",
|
||||
"regex",
|
||||
)
|
||||
|
||||
|
||||
class DependsInner:
|
||||
def __init__(
|
||||
@@ -58,7 +41,7 @@ class DependsInner:
|
||||
dependency: Optional[T_Handler] = None,
|
||||
*,
|
||||
use_cache: bool = True,
|
||||
validate: Union[bool, FieldInfo] = False,
|
||||
validate: Union[bool, PydanticFieldInfo] = False,
|
||||
) -> None:
|
||||
self.dependency = dependency
|
||||
self.use_cache = use_cache
|
||||
@@ -75,7 +58,7 @@ def Depends(
|
||||
dependency: Optional[T_Handler] = None,
|
||||
*,
|
||||
use_cache: bool = True,
|
||||
validate: Union[bool, FieldInfo] = False,
|
||||
validate: Union[bool, PydanticFieldInfo] = False,
|
||||
) -> Any:
|
||||
"""子依赖装饰器
|
||||
|
||||
@@ -113,24 +96,32 @@ class DependParam(Param):
|
||||
本注入应该具有最高优先级,因此应该在其他参数之前检查。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, dependent: Dependent, use_cache: bool, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.dependent = dependent
|
||||
self.use_cache = use_cache
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"Depends({self.extra['dependent']})"
|
||||
return f"Depends({self.dependent}, use_cache={self.use_cache})"
|
||||
|
||||
@classmethod
|
||||
def _from_field(
|
||||
cls, sub_dependent: Dependent, use_cache: bool, validate: Union[bool, FieldInfo]
|
||||
cls,
|
||||
sub_dependent: Dependent,
|
||||
use_cache: bool,
|
||||
validate: Union[bool, PydanticFieldInfo],
|
||||
) -> Self:
|
||||
kwargs = {}
|
||||
if isinstance(validate, FieldInfo):
|
||||
kwargs.update((k, getattr(validate, k)) for k in EXTRA_FIELD_INFO)
|
||||
if isinstance(validate, PydanticFieldInfo):
|
||||
kwargs.update(extract_field_info(validate))
|
||||
|
||||
return cls(
|
||||
Required,
|
||||
validate=bool(validate),
|
||||
**kwargs,
|
||||
dependent=sub_dependent,
|
||||
use_cache=use_cache,
|
||||
)
|
||||
kwargs["validate"] = bool(validate)
|
||||
kwargs["dependent"] = sub_dependent
|
||||
kwargs["use_cache"] = use_cache
|
||||
|
||||
return cls(**kwargs)
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
@@ -142,7 +133,7 @@ class DependParam(Param):
|
||||
if get_origin(param.annotation) is Annotated:
|
||||
type_annotation, *extra_args = get_args(param.annotation)
|
||||
depends_inner = next(
|
||||
(x for x in extra_args if isinstance(x, DependsInner)), None
|
||||
(x for x in reversed(extra_args) if isinstance(x, DependsInner)), None
|
||||
)
|
||||
|
||||
# param default value takes higher priority
|
||||
@@ -191,10 +182,10 @@ class DependParam(Param):
|
||||
dependency_cache: Optional[T_DependencyCache] = None,
|
||||
**kwargs: Any,
|
||||
) -> Any:
|
||||
use_cache: bool = self.extra["use_cache"]
|
||||
use_cache: bool = self.use_cache
|
||||
dependency_cache = {} if dependency_cache is None else dependency_cache
|
||||
|
||||
sub_dependent: Dependent = self.extra["dependent"]
|
||||
sub_dependent: Dependent = self.dependent
|
||||
call = cast(Callable[..., Any], sub_dependent.call)
|
||||
|
||||
# solve sub dependency with current cache
|
||||
@@ -231,8 +222,7 @@ class DependParam(Param):
|
||||
@override
|
||||
async def _check(self, **kwargs: Any) -> None:
|
||||
# run sub dependent pre-checkers
|
||||
sub_dependent: Dependent = self.extra["dependent"]
|
||||
await sub_dependent.check(**kwargs)
|
||||
await self.dependent.check(**kwargs)
|
||||
|
||||
|
||||
class BotParam(Param):
|
||||
@@ -243,14 +233,16 @@ class BotParam(Param):
|
||||
为保证兼容性,本注入还会解析名为 `bot` 且没有类型注解的参数。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, checker: Optional[ModelField] = None, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.checker = checker
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
"BotParam("
|
||||
+ (
|
||||
repr(cast(ModelField, checker).type_)
|
||||
if (checker := self.extra.get("checker"))
|
||||
else ""
|
||||
)
|
||||
+ (repr(self.checker.annotation) if self.checker is not None else "")
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@@ -265,18 +257,13 @@ class BotParam(Param):
|
||||
if generic_check_issubclass(param.annotation, Bot):
|
||||
checker: Optional[ModelField] = None
|
||||
if param.annotation is not Bot:
|
||||
checker = ModelField(
|
||||
name=param.name,
|
||||
type_=param.annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None,
|
||||
required=True,
|
||||
checker = ModelField.construct(
|
||||
name=param.name, annotation=param.annotation, field_info=FieldInfo()
|
||||
)
|
||||
return cls(Required, checker=checker)
|
||||
return cls(checker=checker)
|
||||
# legacy: param is named "bot" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "bot":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, bot: "Bot", **kwargs: Any) -> Any:
|
||||
@@ -284,8 +271,8 @@ class BotParam(Param):
|
||||
|
||||
@override
|
||||
async def _check(self, bot: "Bot", **kwargs: Any) -> None:
|
||||
if checker := self.extra.get("checker"):
|
||||
check_field_type(checker, bot)
|
||||
if self.checker is not None:
|
||||
check_field_type(self.checker, bot)
|
||||
|
||||
|
||||
class EventParam(Param):
|
||||
@@ -296,14 +283,16 @@ class EventParam(Param):
|
||||
为保证兼容性,本注入还会解析名为 `event` 且没有类型注解的参数。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, checker: Optional[ModelField] = None, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.checker = checker
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
"EventParam("
|
||||
+ (
|
||||
repr(cast(ModelField, checker).type_)
|
||||
if (checker := self.extra.get("checker"))
|
||||
else ""
|
||||
)
|
||||
+ (repr(self.checker.annotation) if self.checker is not None else "")
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@@ -318,18 +307,13 @@ class EventParam(Param):
|
||||
if generic_check_issubclass(param.annotation, Event):
|
||||
checker: Optional[ModelField] = None
|
||||
if param.annotation is not Event:
|
||||
checker = ModelField(
|
||||
name=param.name,
|
||||
type_=param.annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None,
|
||||
required=True,
|
||||
checker = ModelField.construct(
|
||||
name=param.name, annotation=param.annotation, field_info=FieldInfo()
|
||||
)
|
||||
return cls(Required, checker=checker)
|
||||
return cls(checker=checker)
|
||||
# legacy: param is named "event" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "event":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, event: "Event", **kwargs: Any) -> Any:
|
||||
@@ -337,8 +321,8 @@ class EventParam(Param):
|
||||
|
||||
@override
|
||||
async def _check(self, event: "Event", **kwargs: Any) -> Any:
|
||||
if checker := self.extra.get("checker", None):
|
||||
check_field_type(checker, event)
|
||||
if self.checker is not None:
|
||||
check_field_type(self.checker, event)
|
||||
|
||||
|
||||
class StateParam(Param):
|
||||
@@ -359,10 +343,10 @@ class StateParam(Param):
|
||||
) -> Optional[Self]:
|
||||
# param type is T_State
|
||||
if param.annotation is T_State:
|
||||
return cls(Required)
|
||||
return cls()
|
||||
# legacy: param is named "state" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "state":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, state: T_State, **kwargs: Any) -> Any:
|
||||
@@ -377,8 +361,18 @@ class MatcherParam(Param):
|
||||
为保证兼容性,本注入还会解析名为 `matcher` 且没有类型注解的参数。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, *args, checker: Optional[ModelField] = None, **kwargs: Any
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.checker = checker
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "MatcherParam()"
|
||||
return (
|
||||
"MatcherParam("
|
||||
+ (repr(self.checker.annotation) if self.checker is not None else "")
|
||||
+ ")"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
@@ -391,18 +385,13 @@ class MatcherParam(Param):
|
||||
if generic_check_issubclass(param.annotation, Matcher):
|
||||
checker: Optional[ModelField] = None
|
||||
if param.annotation is not Matcher:
|
||||
checker = ModelField(
|
||||
name=param.name,
|
||||
type_=param.annotation,
|
||||
class_validators=None,
|
||||
model_config=CustomConfig,
|
||||
default=None,
|
||||
required=True,
|
||||
checker = ModelField.construct(
|
||||
name=param.name, annotation=param.annotation, field_info=FieldInfo()
|
||||
)
|
||||
return cls(Required, checker=checker)
|
||||
return cls(checker=checker)
|
||||
# legacy: param is named "matcher" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "matcher":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, matcher: "Matcher", **kwargs: Any) -> Any:
|
||||
@@ -410,16 +399,16 @@ class MatcherParam(Param):
|
||||
|
||||
@override
|
||||
async def _check(self, matcher: "Matcher", **kwargs: Any) -> Any:
|
||||
if checker := self.extra.get("checker", None):
|
||||
check_field_type(checker, matcher)
|
||||
if self.checker is not None:
|
||||
check_field_type(self.checker, matcher)
|
||||
|
||||
|
||||
class ArgInner:
|
||||
def __init__(
|
||||
self, key: Optional[str], type: Literal["message", "str", "plaintext"]
|
||||
) -> None:
|
||||
self.key = key
|
||||
self.type = type
|
||||
self.key: Optional[str] = key
|
||||
self.type: Literal["message", "str", "plaintext"] = type
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"ArgInner(key={self.key!r}, type={self.type!r})"
|
||||
@@ -449,8 +438,19 @@ class ArgParam(Param):
|
||||
留空则会根据参数名称获取。
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
key: str,
|
||||
type: Literal["message", "str", "plaintext"],
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.key = key
|
||||
self.type = type
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"ArgParam(key={self.extra['key']!r}, type={self.extra['type']!r})"
|
||||
return f"ArgParam(key={self.key!r}, type={self.type!r})"
|
||||
|
||||
@classmethod
|
||||
@override
|
||||
@@ -458,22 +458,19 @@ class ArgParam(Param):
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional[Self]:
|
||||
if isinstance(param.default, ArgInner):
|
||||
return cls(
|
||||
Required, key=param.default.key or param.name, type=param.default.type
|
||||
)
|
||||
return cls(key=param.default.key or param.name, type=param.default.type)
|
||||
elif get_origin(param.annotation) is Annotated:
|
||||
for arg in get_args(param.annotation):
|
||||
for arg in get_args(param.annotation)[:0:-1]:
|
||||
if isinstance(arg, ArgInner):
|
||||
return cls(Required, key=arg.key or param.name, type=arg.type)
|
||||
return cls(key=arg.key or param.name, type=arg.type)
|
||||
|
||||
async def _solve(self, matcher: "Matcher", **kwargs: Any) -> Any:
|
||||
key: str = self.extra["key"]
|
||||
message = matcher.get_arg(key)
|
||||
message = matcher.get_arg(self.key)
|
||||
if message is None:
|
||||
return message
|
||||
if self.extra["type"] == "message":
|
||||
if self.type == "message":
|
||||
return message
|
||||
elif self.extra["type"] == "str":
|
||||
elif self.type == "str":
|
||||
return str(message)
|
||||
else:
|
||||
return message.extract_plain_text()
|
||||
@@ -497,10 +494,10 @@ class ExceptionParam(Param):
|
||||
) -> Optional[Self]:
|
||||
# param type is Exception(s) or subclass(es) of Exception or None
|
||||
if generic_check_issubclass(param.annotation, Exception):
|
||||
return cls(Required)
|
||||
return cls()
|
||||
# legacy: param is named "exception" and has no type annotation
|
||||
elif param.annotation == param.empty and param.name == "exception":
|
||||
return cls(Required)
|
||||
return cls()
|
||||
|
||||
@override
|
||||
async def _solve(self, exception: Optional[Exception] = None, **kwargs: Any) -> Any:
|
||||
@@ -524,11 +521,11 @@ class DefaultParam(Param):
|
||||
cls, param: inspect.Parameter, allow_types: Tuple[Type[Param], ...]
|
||||
) -> Optional[Self]:
|
||||
if param.default != param.empty:
|
||||
return cls(param.default)
|
||||
return cls(default=param.default)
|
||||
|
||||
@override
|
||||
async def _solve(self, **kwargs: Any) -> Any:
|
||||
return Undefined
|
||||
return PydanticUndefined
|
||||
|
||||
|
||||
__autodoc__ = {
|
||||
|
@@ -39,10 +39,12 @@ class Permission:
|
||||
|
||||
def __init__(self, *checkers: Union[T_PermissionChecker, Dependent[bool]]) -> None:
|
||||
self.checkers: Set[Dependent[bool]] = {
|
||||
checker
|
||||
if isinstance(checker, Dependent)
|
||||
else Dependent[bool].parse(
|
||||
call=checker, allow_types=self.HANDLER_PARAM_TYPES
|
||||
(
|
||||
checker
|
||||
if isinstance(checker, Dependent)
|
||||
else Dependent[bool].parse(
|
||||
call=checker, allow_types=self.HANDLER_PARAM_TYPES
|
||||
)
|
||||
)
|
||||
for checker in checkers
|
||||
}
|
||||
|
@@ -38,10 +38,12 @@ class Rule:
|
||||
|
||||
def __init__(self, *checkers: Union[T_RuleChecker, Dependent[bool]]) -> None:
|
||||
self.checkers: Set[Dependent[bool]] = {
|
||||
checker
|
||||
if isinstance(checker, Dependent)
|
||||
else Dependent[bool].parse(
|
||||
call=checker, allow_types=self.HANDLER_PARAM_TYPES
|
||||
(
|
||||
checker
|
||||
if isinstance(checker, Dependent)
|
||||
else Dependent[bool].parse(
|
||||
call=checker, allow_types=self.HANDLER_PARAM_TYPES
|
||||
)
|
||||
)
|
||||
for checker in checkers
|
||||
}
|
||||
|
@@ -358,9 +358,18 @@ async def _check_matcher(
        return False

    try:
        if not await Matcher.check_perm(
            bot, event, stack, dependency_cache
        ) or not await Matcher.check_rule(bot, event, state, stack, dependency_cache):
        if not await Matcher.check_perm(bot, event, stack, dependency_cache):
            logger.trace(f"Permission conditions not met for {Matcher}")
            return False
    except Exception as e:
        logger.opt(colors=True, exception=e).error(
            f"<r><bg #f8bbd0>Permission check failed for {Matcher}.</bg #f8bbd0></r>"
        )
        return False

    try:
        if not await Matcher.check_rule(bot, event, state, stack, dependency_cache):
            logger.trace(f"Rule conditions not met for {Matcher}")
            return False
    except Exception as e:
        logger.opt(colors=True, exception=e).error(
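Since permission and rule failures are now caught and logged separately, a matcher that combines both (standard NoneBot API, shown for context only) gets a clearer trace when either side fails:

```python
from nonebot import on_command
from nonebot.rule import to_me
from nonebot.permission import SUPERUSER

# permission and rule are now checked and logged independently, so a failure
# in one check no longer obscures which of the two rejected the event
reload_cmd = on_command("reload", rule=to_me(), permission=SUPERUSER)
```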
@@ -5,7 +5,18 @@ FrontMatter:
|
||||
description: nonebot.params 模块
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List, Match, Tuple, Union, Optional
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Match,
|
||||
Tuple,
|
||||
Union,
|
||||
Literal,
|
||||
Callable,
|
||||
Optional,
|
||||
overload,
|
||||
)
|
||||
|
||||
from nonebot.typing import T_State
|
||||
from nonebot.matcher import Matcher
|
||||
@@ -147,13 +158,34 @@ def RegexMatched() -> Match[str]:
    return Depends(_regex_matched, use_cache=False)


def _regex_str(state: T_State) -> str:
    return _regex_matched(state).group()
def _regex_str(
    groups: Tuple[Union[str, int], ...]
) -> Callable[[T_State], Union[str, Tuple[Union[str, Any], ...], Any]]:
    def _regex_str_dependency(
        state: T_State,
    ) -> Union[str, Tuple[Union[str, Any], ...], Any]:
        return _regex_matched(state).group(*groups)

    return _regex_str_dependency


def RegexStr() -> str:
@overload
def RegexStr(__group: Literal[0] = 0) -> str: ...


@overload
def RegexStr(__group: Union[str, int]) -> Union[str, Any]: ...


@overload
def RegexStr(
    __group1: Union[str, int], __group2: Union[str, int], *groups: Union[str, int]
) -> Tuple[Union[str, Any], ...]: ...


def RegexStr(*groups: Union[str, int]) -> Union[str, Tuple[Union[str, Any], ...], Any]:
    """正则匹配结果文本"""
    return Depends(_regex_str, use_cache=False)
    return Depends(_regex_str(groups), use_cache=False)


def _regex_group(state: T_State) -> Tuple[Any, ...]:
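With the overloads above, `RegexStr()` still injects the whole matched text, while passing group names or indices injects those capture groups instead. A small usage sketch:

```python
from nonebot import on_regex
from nonebot.params import RegexStr

weather = on_regex(r"^weather of (?P<city>\w+)$")


@weather.handle()
async def _(city: str = RegexStr("city")) -> None:
    # RegexStr() without arguments would inject the full matched text instead
    await weather.send(f"querying weather for {city}")
```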
@@ -29,7 +29,7 @@
|
||||
- `load_builtin_plugins` =>
|
||||
{ref}``load_builtin_plugins` <nonebot.plugin.load.load_builtin_plugins>`
|
||||
- `require` => {ref}``require` <nonebot.plugin.load.require>`
|
||||
- `PluginMetadata` => {ref}``PluginMetadata` <nonebot.plugin.plugin.PluginMetadata>`
|
||||
- `PluginMetadata` => {ref}``PluginMetadata` <nonebot.plugin.model.PluginMetadata>`
|
||||
|
||||
FrontMatter:
|
||||
sidebar_position: 0
|
||||
@@ -39,7 +39,14 @@ FrontMatter:
|
||||
from itertools import chain
|
||||
from types import ModuleType
|
||||
from contextvars import ContextVar
|
||||
from typing import Set, Dict, List, Tuple, Optional
|
||||
from typing import Set, Dict, List, Type, Tuple, TypeVar, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot import get_driver
|
||||
from nonebot.compat import model_dump, type_validate_python
|
||||
|
||||
C = TypeVar("C", bound=BaseModel)
|
||||
|
||||
_plugins: Dict[str, "Plugin"] = {}
|
||||
_managers: List["PluginManager"] = []
|
||||
@@ -77,7 +84,7 @@ def get_plugin(name: str) -> Optional["Plugin"]:
|
||||
如果为 `load_plugins` 文件夹导入的插件,则为文件(夹)名。
|
||||
|
||||
参数:
|
||||
name: 插件名,即 {ref}`nonebot.plugin.plugin.Plugin.name`。
|
||||
name: 插件名,即 {ref}`nonebot.plugin.model.Plugin.name`。
|
||||
"""
|
||||
return _plugins.get(name)
|
||||
|
||||
@@ -88,7 +95,7 @@ def get_plugin_by_module_name(module_name: str) -> Optional["Plugin"]:
|
||||
如果提供的模块名为某个插件的子模块,同样会返回该插件。
|
||||
|
||||
参数:
|
||||
module_name: 模块名,即 {ref}`nonebot.plugin.plugin.Plugin.module_name`。
|
||||
module_name: 模块名,即 {ref}`nonebot.plugin.model.Plugin.module_name`。
|
||||
"""
|
||||
loaded = {plugin.module_name: plugin for plugin in _plugins.values()}
|
||||
has_parent = True
|
||||
@@ -108,12 +115,17 @@ def get_available_plugin_names() -> Set[str]:
    return {*chain.from_iterable(manager.available_plugins for manager in _managers)}


def get_plugin_config(config: Type[C]) -> C:
    """从全局配置获取当前插件需要的配置项。"""
    return type_validate_python(config, model_dump(get_driver().config))


from .on import on as on
from .manager import PluginManager
from .on import on_type as on_type
from .model import Plugin as Plugin
from .load import require as require
from .on import on_regex as on_regex
from .plugin import Plugin as Plugin
from .on import on_notice as on_notice
from .on import on_command as on_command
from .on import on_keyword as on_keyword
@@ -129,8 +141,8 @@ from .load import load_plugins as load_plugins
from .on import on_startswith as on_startswith
from .load import load_from_json as load_from_json
from .load import load_from_toml as load_from_toml
from .model import PluginMetadata as PluginMetadata
from .on import on_shell_command as on_shell_command
from .plugin import PluginMetadata as PluginMetadata
from .load import load_all_plugins as load_all_plugins
from .load import load_builtin_plugin as load_builtin_plugin
from .load import load_builtin_plugins as load_builtin_plugins
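A quick sketch of the new `get_plugin_config` helper inside a plugin; the config fields below are made up for illustration:

```python
from pydantic import BaseModel

from nonebot.plugin import get_plugin_config


class Config(BaseModel):
    my_plugin_token: str = ""    # hypothetical option
    my_plugin_timeout: int = 30  # hypothetical option


# validated view of the global driver config, limited to this plugin's fields
plugin_config = get_plugin_config(Config)
```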
@@ -12,7 +12,7 @@ from typing import Set, Union, Iterable, Optional
|
||||
|
||||
from nonebot.utils import path_to_module_name
|
||||
|
||||
from .plugin import Plugin
|
||||
from .model import Plugin
|
||||
from .manager import PluginManager
|
||||
from . import _managers, get_plugin, _current_plugin_chain, _module_name_to_plugin_name
|
||||
|
||||
@@ -160,7 +160,7 @@ def require(name: str) -> ModuleType:
|
||||
如果为 `load_plugins` 文件夹导入的插件,则为文件(夹)名。
|
||||
|
||||
参数:
|
||||
name: 插件名,即 {ref}`nonebot.plugin.plugin.Plugin.name`。
|
||||
name: 插件名,即 {ref}`nonebot.plugin.model.Plugin.name`。
|
||||
|
||||
异常:
|
||||
RuntimeError: 插件无法加载
|
||||
@@ -213,8 +213,10 @@ def inherit_supported_adapters(*names: str) -> Optional[Set[str]]:
|
||||
)
|
||||
|
||||
return final_supported and {
|
||||
f"nonebot.adapters.{adapter_name[1:]}"
|
||||
if adapter_name.startswith("~")
|
||||
else adapter_name
|
||||
(
|
||||
f"nonebot.adapters.{adapter_name[1:]}"
|
||||
if adapter_name.startswith("~")
|
||||
else adapter_name
|
||||
)
|
||||
for adapter_name in final_supported
|
||||
}
|
||||
|
@@ -20,7 +20,7 @@ from typing import Set, Dict, List, Iterable, Optional, Sequence
|
||||
from nonebot.log import logger
|
||||
from nonebot.utils import escape_tag, path_to_module_name
|
||||
|
||||
from .plugin import Plugin, PluginMetadata
|
||||
from .model import Plugin, PluginMetadata
|
||||
from . import (
|
||||
_managers,
|
||||
_new_plugin,
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
FrontMatter:
|
||||
sidebar_position: 3
|
||||
description: nonebot.plugin.plugin 模块
|
||||
description: nonebot.plugin.model 模块
|
||||
"""
|
||||
|
||||
import contextlib
|
@@ -30,7 +30,7 @@ from nonebot.rule import (
|
||||
shell_command,
|
||||
)
|
||||
|
||||
from .plugin import Plugin
|
||||
from .model import Plugin
|
||||
from . import get_plugin_by_module_name
|
||||
from .manager import _current_plugin_chain
|
||||
|
||||
|
@@ -10,7 +10,7 @@ from nonebot.rule import Rule, ArgumentParser
|
||||
from nonebot.matcher import Matcher, MatcherSource
|
||||
from nonebot.typing import T_State, T_Handler, T_RuleChecker, T_PermissionChecker
|
||||
|
||||
from .plugin import Plugin
|
||||
from .model import Plugin
|
||||
|
||||
def store_matcher(matcher: type[Matcher]) -> None: ...
|
||||
def get_matcher_plugin(depth: int = ...) -> Plugin | None: ...
|
||||
|
@@ -19,4 +19,5 @@ echo = on_command("echo", to_me())

@echo.handle()
async def handle_echo(message: Message = CommandArg()):
    await echo.send(message=message)
    if any((not seg.is_text()) or str(seg) for seg in message):
        await echo.send(message=message)
@@ -117,6 +117,11 @@ class TrieRule:
|
||||
# check whitespace
|
||||
arg_str = segment_text[len(pf.key) :]
|
||||
arg_str_stripped = arg_str.lstrip()
|
||||
# check next segment until arg detected or no text remain
|
||||
while not arg_str_stripped and msg and msg[0].is_text():
|
||||
arg_str += str(msg.pop(0))
|
||||
arg_str_stripped = arg_str.lstrip()
|
||||
|
||||
has_arg = arg_str_stripped or msg
|
||||
if (
|
||||
has_arg
|
||||
@@ -455,45 +460,38 @@ class ArgumentParser(ArgParser):
|
||||
self,
|
||||
args: Optional[Sequence[Union[str, MessageSegment]]] = None,
|
||||
namespace: None = None,
|
||||
) -> Tuple[Namespace, List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[Namespace, List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
@overload
|
||||
def parse_known_args(
|
||||
self, args: Optional[Sequence[Union[str, MessageSegment]]], namespace: T
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
@overload
|
||||
def parse_known_args(
|
||||
self, *, namespace: T
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[T, List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
def parse_known_args(
|
||||
self,
|
||||
args: Optional[Sequence[Union[str, MessageSegment]]] = None,
|
||||
namespace: Optional[T] = None,
|
||||
) -> Tuple[Union[Namespace, T], List[Union[str, MessageSegment]]]:
|
||||
...
|
||||
) -> Tuple[Union[Namespace, T], List[Union[str, MessageSegment]]]: ...
|
||||
|
||||
@overload
|
||||
def parse_args(
|
||||
self,
|
||||
args: Optional[Sequence[Union[str, MessageSegment]]] = None,
|
||||
namespace: None = None,
|
||||
) -> Namespace:
|
||||
...
|
||||
) -> Namespace: ...
|
||||
|
||||
@overload
|
||||
def parse_args(
|
||||
self, args: Optional[Sequence[Union[str, MessageSegment]]], namespace: T
|
||||
) -> T:
|
||||
...
|
||||
) -> T: ...
|
||||
|
||||
@overload
|
||||
def parse_args(self, *, namespace: T) -> T:
|
||||
...
|
||||
def parse_args(self, *, namespace: T) -> T: ...
|
||||
|
||||
def parse_args(
|
||||
self,
|
||||
@@ -599,7 +597,7 @@ def shell_command(
|
||||
通过 {ref}`nonebot.params.ShellCommandArgs` 获取解析后的参数字典
|
||||
(例: `{"arg": "arg", "h": True}`)。
|
||||
|
||||
:::warning 警告
|
||||
:::caution 警告
|
||||
如果参数解析失败,则通过 {ref}`nonebot.params.ShellCommandArgs`
|
||||
获取的将是 {ref}`nonebot.exception.ParserExit` 异常。
|
||||
:::
|
||||
|
@@ -10,18 +10,14 @@ FrontMatter:
|
||||
description: nonebot.typing 模块
|
||||
"""
|
||||
|
||||
import sys
|
||||
import types
|
||||
import warnings
|
||||
from typing_extensions import ParamSpec, TypeAlias, override
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Dict,
|
||||
Union,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Awaitable,
|
||||
)
|
||||
import contextlib
|
||||
import typing as t
|
||||
import typing_extensions as t_ext
|
||||
from typing import TYPE_CHECKING, TypeVar
|
||||
from typing_extensions import ParamSpec, TypeAlias, get_args, override, get_origin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from asyncio import Task
|
||||
@@ -32,7 +28,7 @@ if TYPE_CHECKING:
|
||||
T = TypeVar("T")
|
||||
P = ParamSpec("P")
|
||||
|
||||
T_Wrapped: TypeAlias = Callable[P, T]
|
||||
T_Wrapped: TypeAlias = t.Callable[P, T]
|
||||
|
||||
|
||||
def overrides(InterfaceClass: object):
|
||||
@@ -47,14 +43,77 @@ def overrides(InterfaceClass: object):
|
||||
return override
|
||||
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
|
||||
def origin_is_union(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
"""判断是否是 Union 类型"""
|
||||
return origin is t.Union
|
||||
|
||||
else:
|
||||
|
||||
def origin_is_union(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
return origin is t.Union or origin is types.UnionType
|
||||
|
||||
|
||||
def origin_is_literal(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
"""判断是否是 Literal 类型"""
|
||||
return origin is t.Literal or origin is t_ext.Literal
|
||||
|
||||
|
||||
def _literal_values(type_: t.Type[t.Any]) -> t.Tuple[t.Any, ...]:
|
||||
return get_args(type_)
|
||||
|
||||
|
||||
def all_literal_values(type_: t.Type[t.Any]) -> t.List[t.Any]:
|
||||
"""获取 Literal 类型包含的所有值"""
|
||||
if not origin_is_literal(get_origin(type_)):
|
||||
return [type_]
|
||||
|
||||
return [x for value in _literal_values(type_) for x in all_literal_values(value)]
|
||||
|
||||
|
||||
def origin_is_annotated(origin: t.Optional[t.Type[t.Any]]) -> bool:
|
||||
"""判断是否是 Annotated 类型"""
|
||||
with contextlib.suppress(TypeError):
|
||||
return origin is not None and issubclass(origin, t_ext.Annotated)
|
||||
return False
|
||||
|
||||
|
||||
NONE_TYPES = {None, type(None), t.Literal[None], t_ext.Literal[None]}
|
||||
if sys.version_info >= (3, 10):
|
||||
NONE_TYPES.add(types.NoneType)
|
||||
|
||||
|
||||
def is_none_type(type_: t.Type[t.Any]) -> bool:
|
||||
"""判断是否是 None 类型"""
|
||||
return type_ in NONE_TYPES
|
||||
|
||||
|
||||
if sys.version_info < (3, 9): # pragma: py-lt-39
|
||||
|
||||
def evaluate_forwardref(
|
||||
ref: t.ForwardRef, globalns: t.Dict[str, t.Any], localns: t.Dict[str, t.Any]
|
||||
) -> t.Any:
|
||||
return ref._evaluate(globalns, localns)
|
||||
|
||||
else: # pragma: py-gte-39
|
||||
|
||||
def evaluate_forwardref(
|
||||
ref: t.ForwardRef, globalns: t.Dict[str, t.Any], localns: t.Dict[str, t.Any]
|
||||
) -> t.Any:
|
||||
return ref._evaluate(globalns, localns, frozenset())
|
||||
|
||||
|
||||
# state
|
||||
T_State: TypeAlias = Dict[Any, Any]
|
||||
T_State: TypeAlias = t.Dict[t.Any, t.Any]
|
||||
"""事件处理状态 State 类型"""
|
||||
|
||||
_DependentCallable: TypeAlias = Union[Callable[..., T], Callable[..., Awaitable[T]]]
|
||||
_DependentCallable: TypeAlias = t.Union[
|
||||
t.Callable[..., T], t.Callable[..., t.Awaitable[T]]
|
||||
]
|
||||
|
||||
# driver hooks
|
||||
T_BotConnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
T_BotConnectionHook: TypeAlias = _DependentCallable[t.Any]
|
||||
"""Bot 连接建立时钩子函数
|
||||
|
||||
依赖参数:
|
||||
@@ -63,7 +122,7 @@ T_BotConnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
- BotParam: Bot 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_BotDisconnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
T_BotDisconnectionHook: TypeAlias = _DependentCallable[t.Any]
|
||||
"""Bot 连接断开时钩子函数
|
||||
|
||||
依赖参数:
|
||||
@@ -74,15 +133,17 @@ T_BotDisconnectionHook: TypeAlias = _DependentCallable[Any]
|
||||
"""
|
||||
|
||||
# api hooks
|
||||
T_CallingAPIHook: TypeAlias = Callable[["Bot", str, Dict[str, Any]], Awaitable[Any]]
|
||||
T_CallingAPIHook: TypeAlias = t.Callable[
|
||||
["Bot", str, t.Dict[str, t.Any]], t.Awaitable[t.Any]
|
||||
]
|
||||
"""`bot.call_api` 钩子函数"""
|
||||
T_CalledAPIHook: TypeAlias = Callable[
|
||||
["Bot", Optional[Exception], str, Dict[str, Any], Any], Awaitable[Any]
|
||||
T_CalledAPIHook: TypeAlias = t.Callable[
|
||||
["Bot", t.Optional[Exception], str, t.Dict[str, t.Any], t.Any], t.Awaitable[t.Any]
|
||||
]
|
||||
"""`bot.call_api` 后执行的函数,参数分别为 bot, exception, api, data, result"""
|
||||
|
||||
# event hooks
|
||||
T_EventPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
T_EventPreProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件预处理函数 EventPreProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
@@ -93,8 +154,8 @@ T_EventPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
- StateParam: State 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_EventPostProcessor: TypeAlias = _DependentCallable[Any]
|
||||
"""事件预处理函数 EventPostProcessor 类型
|
||||
T_EventPostProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件后处理函数 EventPostProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
|
||||
@@ -106,7 +167,7 @@ T_EventPostProcessor: TypeAlias = _DependentCallable[Any]
|
||||
"""
|
||||
|
||||
# matcher run hooks
|
||||
T_RunPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
T_RunPreProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件响应器运行前预处理函数 RunPreProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
@@ -118,7 +179,7 @@ T_RunPreProcessor: TypeAlias = _DependentCallable[Any]
|
||||
- MatcherParam: Matcher 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_RunPostProcessor: TypeAlias = _DependentCallable[Any]
|
||||
T_RunPostProcessor: TypeAlias = _DependentCallable[t.Any]
|
||||
"""事件响应器运行后后处理函数 RunPostProcessor 类型
|
||||
|
||||
依赖参数:
|
||||
@@ -155,7 +216,7 @@ T_PermissionChecker: TypeAlias = _DependentCallable[bool]
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
|
||||
T_Handler: TypeAlias = _DependentCallable[Any]
|
||||
T_Handler: TypeAlias = _DependentCallable[t.Any]
|
||||
"""Handler 处理函数。"""
|
||||
T_TypeUpdater: TypeAlias = _DependentCallable[str]
|
||||
"""TypeUpdater 在 Matcher.pause, Matcher.reject 时被运行,用于更新响应的事件类型。
|
||||
@@ -183,5 +244,5 @@ T_PermissionUpdater: TypeAlias = _DependentCallable["Permission"]
|
||||
- MatcherParam: Matcher 对象
|
||||
- DefaultParam: 带有默认值的参数
|
||||
"""
|
||||
T_DependencyCache: TypeAlias = Dict[_DependentCallable[Any], "Task[Any]"]
|
||||
T_DependencyCache: TypeAlias = t.Dict[_DependentCallable[t.Any], "Task[t.Any]"]
|
||||
"""依赖缓存, 用于存储依赖函数的返回值"""
|
||||
|
@@ -12,28 +12,38 @@ import inspect
|
||||
import importlib
|
||||
import dataclasses
|
||||
from pathlib import Path
|
||||
from collections import deque
|
||||
from contextvars import copy_context
|
||||
from functools import wraps, partial
|
||||
from contextlib import asynccontextmanager
|
||||
from typing_extensions import ParamSpec, get_args, override, get_origin
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
Type,
|
||||
Tuple,
|
||||
Union,
|
||||
Generic,
|
||||
Mapping,
|
||||
TypeVar,
|
||||
Callable,
|
||||
Optional,
|
||||
Sequence,
|
||||
Coroutine,
|
||||
AsyncGenerator,
|
||||
ContextManager,
|
||||
overload,
|
||||
)
|
||||
|
||||
from pydantic.typing import is_union, is_none_type
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.log import logger
|
||||
from nonebot.typing import (
|
||||
is_none_type,
|
||||
origin_is_union,
|
||||
origin_is_literal,
|
||||
all_literal_values,
|
||||
)
|
||||
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
@@ -53,6 +63,34 @@ def escape_tag(s: str) -> str:
|
||||
return re.sub(r"</?((?:[fb]g\s)?[^<>\s]*)>", r"\\\g<0>", s)
|
||||
|
||||
|
||||
def deep_update(
|
||||
mapping: Dict[K, Any], *updating_mappings: Dict[K, Any]
|
||||
) -> Dict[K, Any]:
|
||||
"""深度更新合并字典"""
|
||||
updated_mapping = mapping.copy()
|
||||
for updating_mapping in updating_mappings:
|
||||
for k, v in updating_mapping.items():
|
||||
if (
|
||||
k in updated_mapping
|
||||
and isinstance(updated_mapping[k], dict)
|
||||
and isinstance(v, dict)
|
||||
):
|
||||
updated_mapping[k] = deep_update(updated_mapping[k], v)
|
||||
else:
|
||||
updated_mapping[k] = v
|
||||
return updated_mapping
|
||||
|
||||
|
||||
def lenient_issubclass(
|
||||
cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...]]
|
||||
) -> bool:
|
||||
"""检查 cls 是否是 class_or_tuple 中的一个类型子类并忽略类型错误。"""
|
||||
try:
|
||||
return isinstance(cls, type) and issubclass(cls, class_or_tuple)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
|
||||
def generic_check_issubclass(
|
||||
cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...]]
|
||||
) -> bool:
|
||||
@@ -62,6 +100,8 @@ def generic_check_issubclass(
|
||||
|
||||
- 如果 cls 是 `typing.Union` 或 `types.UnionType` 类型,
|
||||
则会检查其中的所有类型是否是 class_or_tuple 中一个类型的子类或 None。
|
||||
- 如果 cls 是 `typing.Literal` 类型,
|
||||
则会检查其中的所有值是否是 class_or_tuple 中一个类型的实例。
|
||||
- 如果 cls 是 `typing.TypeVar` 类型,
|
||||
则会检查其 `__bound__` 或 `__constraints__`
|
||||
是否是 class_or_tuple 中一个类型的子类或 None。
|
||||
@@ -70,14 +110,23 @@ def generic_check_issubclass(
|
||||
return issubclass(cls, class_or_tuple)
|
||||
except TypeError:
|
||||
origin = get_origin(cls)
|
||||
if is_union(origin):
|
||||
if origin_is_union(origin):
|
||||
return all(
|
||||
is_none_type(type_) or generic_check_issubclass(type_, class_or_tuple)
|
||||
for type_ in get_args(cls)
|
||||
)
|
||||
elif origin_is_literal(origin):
|
||||
return all(
|
||||
is_none_type(value) or isinstance(value, class_or_tuple)
|
||||
for value in all_literal_values(cls)
|
||||
)
|
||||
# ensure generic List, Dict can be checked
|
||||
elif origin:
|
||||
return issubclass(origin, class_or_tuple)
|
||||
# avoid class check error (typing.Final, typing.ClassVar, etc...)
|
||||
try:
|
||||
return issubclass(origin, class_or_tuple)
|
||||
except TypeError:
|
||||
return False
|
||||
elif isinstance(cls, TypeVar):
|
||||
if cls.__constraints__:
|
||||
return all(
|
||||
@@ -90,6 +139,21 @@ def generic_check_issubclass(
|
||||
return False
|
||||
|
||||
|
||||
def type_is_complex(type_: Type[Any]) -> bool:
|
||||
"""检查 type_ 是否是复杂类型"""
|
||||
origin = get_origin(type_)
|
||||
return _type_is_complex_inner(type_) or _type_is_complex_inner(origin)
|
||||
|
||||
|
||||
def _type_is_complex_inner(type_: Optional[Type[Any]]) -> bool:
|
||||
if lenient_issubclass(type_, (str, bytes)):
|
||||
return False
|
||||
|
||||
return lenient_issubclass(
|
||||
type_, (BaseModel, Mapping, Sequence, tuple, set, frozenset, deque)
|
||||
) or dataclasses.is_dataclass(type_)
|
||||
|
||||
|
||||
def is_coroutine_callable(call: Callable[..., Any]) -> bool:
|
||||
"""检查 call 是否是一个 callable 协程函数"""
|
||||
if inspect.isroutine(call):
|
||||
@@ -154,8 +218,7 @@ async def run_coro_with_catch(
|
||||
coro: Coroutine[Any, Any, T],
|
||||
exc: Tuple[Type[Exception], ...],
|
||||
return_on_err: None = None,
|
||||
) -> Union[T, None]:
|
||||
...
|
||||
) -> Union[T, None]: ...
|
||||
|
||||
|
||||
@overload
|
||||
@@ -163,8 +226,7 @@ async def run_coro_with_catch(
|
||||
coro: Coroutine[Any, Any, T],
|
||||
exc: Tuple[Type[Exception], ...],
|
||||
return_on_err: R,
|
||||
) -> Union[T, R]:
|
||||
...
|
||||
) -> Union[T, R]: ...
|
||||
|
||||
|
||||
async def run_coro_with_catch(
|
||||
|
package.json (23 lines changed)
@@ -12,11 +12,30 @@
|
||||
"serve": "yarn workspace nonebot serve",
|
||||
"clear": "yarn workspace nonebot clear",
|
||||
"prettier": "prettier --config ./.prettierrc --write \"./website/\"",
|
||||
"lint": "yarn lint:js && yarn lint:style",
|
||||
"lint:js": "eslint --cache --report-unused-disable-directives \"**/*.{js,jsx,ts,tsx,mjs}\"",
|
||||
"lint:js:fix": "eslint --cache --report-unused-disable-directives --fix \"**/*.{js,jsx,ts,tsx,mjs}\"",
|
||||
"lint:style": "stylelint \"**/*.css\"",
|
||||
"lint:style:fix": "stylelint --fix \"**/*.css\"",
|
||||
"pyright": "pyright"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@typescript-eslint/eslint-plugin": "^6.6.0",
|
||||
"@typescript-eslint/parser": "^6.6.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"prettier": "^2.5.0",
|
||||
"pyright": "^1.1.317"
|
||||
"eslint": "^8.48.0",
|
||||
"eslint-config-prettier": "^9.0.0",
|
||||
"eslint-import-resolver-typescript": "^3.6.0",
|
||||
"eslint-plugin-import": "^2.28.1",
|
||||
"eslint-plugin-jsx-a11y": "^6.7.1",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"eslint-plugin-react": "^7.33.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-regexp": "^1.15.0",
|
||||
"prettier": "^3.0.3",
|
||||
"pyright": "^1.1.317",
|
||||
"stylelint": "^15.10.3",
|
||||
"stylelint-config-standard": "^34.0.0",
|
||||
"stylelint-prettier": "^4.0.2"
|
||||
}
|
||||
}
|
||||
|
poetry.lock (generated, 2342 changed lines): diff not shown because it is too large.
@@ -1,6 +1,6 @@
[tool.poetry]
name = "nonebot2"
-version = "2.1.0"
+version = "2.2.0"
description = "An asynchronous python bot framework."
authors = ["yanyongyu <yyy@nonebot.dev>"]
license = "MIT"
@@ -29,33 +29,29 @@ python = "^3.8"
yarl = "^1.7.2"
pygtrie = "^2.4.1"
loguru = ">=0.6.0,<1.0.0"
+python-dotenv = ">=0.21.0,<2.0.0"
typing-extensions = ">=4.4.0,<5.0.0"
+pydantic = ">=1.10.0,<3.0.0,!=2.5.0,!=2.5.1"
tomli = { version = "^2.0.1", python = "<3.11" }
-pydantic = { version = "^1.10.0", extras = ["dotenv"] }

websockets = { version = ">=10.0", optional = true }
Quart = { version = ">=0.18.0,<1.0.0", optional = true }
fastapi = { version = ">=0.93.0,<1.0.0", optional = true }
-aiohttp = { version = "^3.7.4", extras = ["speedups"], optional = true }
+aiohttp = { version = "^3.9.0b0", extras = ["speedups"], optional = true }
httpx = { version = ">=0.20.0,<1.0.0", extras = ["http2"], optional = true }
uvicorn = { version = ">=0.20.0,<1.0.0", extras = [
    "standard",
], optional = true }

[tool.poetry.group.dev.dependencies]
+ruff = "^0.2.0"
isort = "^5.10.1"
-black = "^23.1.0"
+black = "^24.0.0"
nonemoji = "^0.1.2"
pre-commit = "^3.0.0"
-ruff = ">=0.0.272,<1.0.0"

[tool.poetry.group.test.dependencies]
-nonebug = "^0.3.0"
-werkzeug = "^2.3.6"
-pytest-cov = "^4.0.0"
-pytest-xdist = "^3.0.2"
-pytest-asyncio = "^0.21.0"
-coverage-conditional-plugin = "^0.9.0"
+nonebot-test = { path = "./envs/test/", develop = false }

[tool.poetry.group.docs.dependencies]
nb-autodoc = "^1.0.0a5"
@@ -90,19 +86,21 @@ src_paths = ["nonebot", "tests"]
extra_standard_library = ["typing_extensions"]

[tool.ruff]
-select = ["E", "W", "F", "UP", "C", "T", "PYI", "PT", "Q"]
-ignore = ["E402", "C901", "UP037"]

line-length = 88
target-version = "py38"

-[tool.ruff.flake8-pytest-style]
+[tool.ruff.lint]
+select = ["E", "W", "F", "UP", "C", "T", "PYI", "PT", "Q"]
+ignore = ["E402", "C901", "UP037"]

+[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
mark-parentheses = false

[tool.pyright]
pythonVersion = "3.8"
pythonPlatform = "All"
+defineConstant = { PYDANTIC_V2 = true }
executionEnvironments = [
    { root = "./tests", extraPaths = [
        "./",
@@ -112,7 +110,7 @@ executionEnvironments = [

typeCheckingMode = "basic"
reportShadowedImports = false
+disableBytesTypePromotions = true

[build-system]
requires = ["poetry_core>=1.0.0"]
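Note: the widened pydantic constraint and the pyright defineConstant above support running against both pydantic v1 and v2. The real switch lives in nonebot's compat layer; a minimal sketch of the underlying pattern, assuming only the installed major version matters, is:

from pydantic import VERSION as PYDANTIC_VERSION

# True when pydantic v2 is installed; the pyright defineConstant lets the
# type checker evaluate branches guarded by this flag statically.
PYDANTIC_V2 = int(PYDANTIC_VERSION.split(".", 1)[0]) >= 2

if PYDANTIC_V2:
    from pydantic import TypeAdapter

    def validate(type_, value):
        # pydantic v2 validation entry point
        return TypeAdapter(type_).validate_python(value)

else:
    from pydantic import parse_obj_as

    def validate(type_, value):
        # pydantic v1 equivalent
        return parse_obj_as(type_, value)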
scripts/build-api-docs.sh (new executable file, 11 lines)
#! /usr/bin/env bash

# cd to the root of the project
cd "$(dirname "$0")/.."

poetry run nb-autodoc nonebot \
    -s nonebot.plugins \
    -u nonebot.internal \
    -u nonebot.internal.*
cp -r ./build/nonebot/* ./website/docs/api/
yarn prettier
scripts/run-tests.sh (new executable file, 7 lines)
#! /usr/bin/env bash

# cd to the root of the tests
cd "$(dirname "$0")/../tests"

# Run the tests
pytest -n auto --cov-report xml $@
scripts/setup-envs.sh (new executable file, 14 lines)
#! /usr/bin/env bash

# config poetry to install env in project
poetry config virtualenvs.in-project true

# setup dev environment
echo "Setting up dev environment"
poetry install --all-extras && poetry run pre-commit install && yarn install

# setup pydantic v2 test environment
for env in $(find ./envs/ -maxdepth 1 -mindepth 1 -type d -not -name test); do
  echo "Setting up $env environment"
  (cd $env && poetry install --no-root)
done
scripts/update-envs.sh (new executable file, 15 lines)
#! /usr/bin/env bash

# update test env
echo "Updating test env..."
(cd ./envs/test/ && poetry update --lock)

# update dev env
echo "Updating dev env..."
poetry update

# update other envs
for env in $(find ./envs/ -maxdepth 1 -mindepth 1 -type d -not -name test); do
  echo "Updating $env env..."
  (cd $env && poetry update)
done
@@ -12,7 +12,7 @@ exclude_lines =
    @(abc\.)?abstractmethod
    raise NotImplementedError
    warnings\.warn
-   \.\.\.
+   ^\.\.\.$
    pass
    if __name__ == .__main__.:

@@ -22,5 +22,8 @@ rules =
    "sys_platform != 'linux'": py-linux
    "sys_platform != 'darwin'": py-darwin
    "sys_version_info < (3, 9)": py-gte-39
    "sys_version_info >= (3, 9)": py-lt-39
    "sys_version_info < (3, 11)": py-gte-311
    "sys_version_info >= (3, 11)": py-lt-311
+   "package_version('pydantic') < (2,)": pydantic-v2
+   "package_version('pydantic') >= (2,)": pydantic-v1
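Note: as far as I understand coverage-conditional-plugin, each rule above maps a condition onto a "# pragma:" marker, and lines tagged with that marker are excluded from coverage when the condition holds. A hypothetical usage of the version rules (the module names here are only an illustration):

import sys

if sys.version_info >= (3, 11):  # pragma: py-gte-311
    # only reachable on 3.11+, so it is excluded when measuring on older Pythons
    import tomllib as tomli
else:  # pragma: py-lt-311
    # third-party backport branch, excluded when measuring on 3.11+
    import tomli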
tests/.env.example (new file, 17 lines)
SIMPLE=simple
COMPLEX='
[1, 2, 3]
'
COMPLEX_NONE
COMPLEX_UNION=[1, 2, 3]
NESTED={"a": 1}
NESTED__B=2
NESTED__C__C=3
NESTED__COMPLEX=[1, 2, 3]
NESTED_INNER__A=1
NESTED_INNER__B=2
OTHER_SIMPLE=simple
OTHER_NESTED={"a": 1}
OTHER_NESTED__B=2
OTHER_NESTED_INNER__A=1
OTHER_NESTED_INNER__B=2

@@ -13,3 +13,4 @@ NESTED_MISSING_DICT__A=1
NESTED_MISSING_DICT__B__C=2
NOT_NESTED=some string
NOT_NESTED__A=1
+PLUGIN_CONFIG=1
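Note: the NESTED__B style variables above exercise the double-underscore nesting convention. This is not nonebot's loader, only a tiny illustration of how such names fold into a nested dict:

import json
from typing import Any, Dict


def parse_env(pairs: Dict[str, str], delimiter: str = "__") -> Dict[str, Any]:
    """Fold NESTED__B=2 style variables into a nested dict (illustration only)."""
    result: Dict[str, Any] = {}
    for key, raw in pairs.items():
        try:
            value: Any = json.loads(raw)  # values are parsed as JSON where possible
        except json.JSONDecodeError:
            value = raw
        *path, leaf = key.lower().split(delimiter)
        target = result
        for part in path:
            target = target.setdefault(part, {})
        target[leaf] = value
    return result


print(parse_env({"NESTED": '{"a": 1}', "NESTED__B": "2", "NESTED__C__C": "3"}))
# {'nested': {'a': 1, 'b': 2, 'c': {'c': 3}}}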
@@ -19,6 +19,8 @@ os.environ["CONFIG_OVERRIDE"] = "new"
if TYPE_CHECKING:
    from nonebot.plugin import Plugin

+collect_ignore = ["plugins/", "dynamic/", "bad_plugins/"]
+

def pytest_configure(config: pytest.Config) -> None:
    config.stash[NONEBOT_INIT_KWARGS] = {"config_from_init": "init"}
@@ -1,9 +1,15 @@
import json
import base64
+import socket
from typing import Dict, List, Union, TypeVar

+from wsproto.events import Ping
from werkzeug import Request, Response
from werkzeug.datastructures import MultiDict
+from wsproto.frame_protocol import CloseReason
+from wsproto.events import Request as WSRequest
+from wsproto import WSConnection, ConnectionType
+from wsproto.events import TextMessage, BytesMessage, CloseConnection, AcceptConnection

K = TypeVar("K")
V = TypeVar("V")
@@ -29,8 +35,7 @@ def flattern(d: "MultiDict[K, V]") -> Dict[K, Union[V, List[V]]]:
    return {k: v[0] if len(v) == 1 else v for k, v in d.to_dict(flat=False).items()}


-@Request.application
-def request_handler(request: Request) -> Response:
+def http_echo(request: Request) -> Response:
    try:
        _json = json.loads(request.data.decode("utf-8"))
    except (ValueError, TypeError):
@@ -67,3 +72,65 @@ def request_handler(request: Request) -> Response:
        status=200,
        content_type="application/json",
    )


+def websocket_echo(request: Request) -> Response:
+    stream = request.environ["werkzeug.socket"]
+
+    ws = WSConnection(ConnectionType.SERVER)
+
+    in_data = b"GET %s HTTP/1.1\r\n" % request.path.encode("utf-8")
+    for header, value in request.headers.items():
+        in_data += f"{header}: {value}\r\n".encode()
+    in_data += b"\r\n"
+
+    ws.receive_data(in_data)
+
+    running: bool = True
+    while True:
+        out_data = b""
+
+        for event in ws.events():
+            if isinstance(event, WSRequest):
+                out_data += ws.send(AcceptConnection())
+            elif isinstance(event, CloseConnection):
+                out_data += ws.send(event.response())
+                running = False
+            elif isinstance(event, Ping):
+                out_data += ws.send(event.response())
+            elif isinstance(event, TextMessage):
+                if event.data == "quit":
+                    out_data += ws.send(
+                        CloseConnection(CloseReason.NORMAL_CLOSURE, "bye")
+                    )
+                    running = False
+                else:
+                    out_data += ws.send(TextMessage(data=event.data))
+            elif isinstance(event, BytesMessage):
+                if event.data == b"quit":
+                    out_data += ws.send(
+                        CloseConnection(CloseReason.NORMAL_CLOSURE, "bye")
+                    )
+                    running = False
+                else:
+                    out_data += ws.send(BytesMessage(data=event.data))
+
+        if out_data:
+            stream.send(out_data)
+
+        if not running:
+            break
+
+        in_data = stream.recv(4096)
+        ws.receive_data(in_data)
+
+    stream.shutdown(socket.SHUT_RDWR)
+    return Response("", status=204)
+
+
+@Request.application
+def request_handler(request: Request) -> Response:
+    if request.headers.get("Connection") == "Upgrade":
+        return websocket_echo(request)
+    else:
+        return http_echo(request)
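Note: the handler above upgrades "Connection: Upgrade" requests to a WebSocket echo that closes with code 1000 when it receives "quit". A client exercising it could look like the sketch below; the URL and path are placeholders for wherever the test server is bound, and the websockets package is used only for illustration:

import asyncio

import websockets  # third-party client library, illustration only


async def main() -> None:
    async with websockets.connect("ws://127.0.0.1:8080/websocket") as ws:
        await ws.send("hello")
        print(await ws.recv())  # echoed back as text: "hello"

        await ws.send(b"\x01\x02")
        print(await ws.recv())  # echoed back as bytes

        await ws.send("quit")  # server answers with a normal closure (code 1000)


asyncio.run(main())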
@@ -78,8 +78,7 @@ async def reject_preset(a: str = ArgStr(), b: str = ArgStr()):
test_overload = on_message()


-class FakeEvent(Event):
-    ...
+class FakeEvent(Event): ...


@test_overload.got("a")

@@ -8,8 +8,7 @@ class Config(BaseModel):
    custom: str = ""


-class FakeAdapter(Adapter):
-    ...
+class FakeAdapter(Adapter): ...


__plugin_meta__ = PluginMetadata(

@@ -26,3 +26,14 @@ async def annotated_arg_str(key: Annotated[str, ArgStr()]) -> str:

async def annotated_arg_plain_text(key: Annotated[str, ArgPlainText()]) -> str:
    return key


+# test dependency priority
+async def annotated_prior_arg(key: Annotated[str, ArgStr("foo")] = ArgPlainText()):
+    return key
+
+
+async def annotated_multi_arg(
+    key: Annotated[Annotated[str, ArgStr("foo")], ArgPlainText()]
+):
+    return key
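Note: the fixtures above test how Annotated argument dependencies are resolved. A minimal sketch of how such a dependency is declared in a real handler, using nonebot.params.ArgPlainText (the command name and prompt here are made up for the example):

from typing_extensions import Annotated

from nonebot import on_command
from nonebot.params import ArgPlainText

weather = on_command("weather")


@weather.got("city", prompt="Which city?")
async def got_city(city: Annotated[str, ArgPlainText()]):
    # `city` is injected as the plain text of the "city" argument,
    # the same mechanism the annotated_arg_* fixtures exercise.
    await weather.finish(f"Weather for {city}: ...")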
@@ -11,20 +11,17 @@ async def legacy_bot(bot):
    return bot


-async def not_legacy_bot(bot: int):
-    ...
+async def not_legacy_bot(bot: int): ...


-class FooBot(Bot):
-    ...
+class FooBot(Bot): ...


async def sub_bot(b: FooBot) -> FooBot:
    return b


-class BarBot(Bot):
-    ...
+class BarBot(Bot): ...


async def union_bot(b: Union[FooBot, BarBot]) -> Union[FooBot, BarBot]:

@@ -45,5 +42,4 @@ async def generic_bot_none(b: CB) -> CB:
    return b


-async def not_bot(b: Union[int, Bot]):
-    ...
+async def not_bot(b: Union[int, Bot]): ...
@@ -36,8 +36,7 @@ class ClassDependency:
    y: int = Depends(gen_async)


-class FooBot(Bot):
-    ...
+class FooBot(Bot): ...


async def sub_bot(b: FooBot) -> FooBot:

@@ -79,6 +78,12 @@ async def annotated_prior_depend(
    return x


+async def annotated_multi_depend(
+    x: Annotated[Annotated[int, Depends(lambda: 2)], Depends(dependency)]
+):
+    return x
+
+
# test sub dependency type mismatch
async def sub_type_mismatch(b: FooBot = Depends(sub_bot)):
    return b
@@ -12,20 +12,17 @@ async def legacy_event(event):
    return event


-async def not_legacy_event(event: int):
-    ...
+async def not_legacy_event(event: int): ...


-class FooEvent(Event):
-    ...
+class FooEvent(Event): ...


async def sub_event(e: FooEvent) -> FooEvent:
    return e


-class BarEvent(Event):
-    ...
+class BarEvent(Event): ...


async def union_event(e: Union[FooEvent, BarEvent]) -> Union[FooEvent, BarEvent]:

@@ -46,8 +43,7 @@ async def generic_event_none(e: CE) -> CE:
    return e


-async def not_event(e: Union[int, Event]):
-    ...
+async def not_event(e: Union[int, Event]): ...


async def event_type(t: str = EventType()) -> str:
@@ -4,3 +4,7 @@ from typing import Union
async def exc(e: Exception, x: Union[ValueError, TypeError]) -> Exception:
    assert e == x
    return e
+
+
+async def legacy_exc(exception) -> Exception:
+    return exception
@@ -13,20 +13,17 @@ async def legacy_matcher(matcher):
    return matcher


-async def not_legacy_matcher(matcher: int):
-    ...
+async def not_legacy_matcher(matcher: int): ...


-class FooMatcher(Matcher):
-    ...
+class FooMatcher(Matcher): ...


async def sub_matcher(m: FooMatcher) -> FooMatcher:
    return m


-class BarMatcher(Matcher):
-    ...
+class BarMatcher(Matcher): ...


async def union_matcher(

@@ -49,8 +46,7 @@ async def generic_matcher_none(m: CM) -> CM:
    return m


-async def not_matcher(m: Union[int, Matcher]):
-    ...
+async def not_matcher(m: Union[int, Matcher]): ...


async def receive(e: Event = Received("test")) -> Event:
@@ -29,8 +29,7 @@ async def legacy_state(state):
    return state


-async def not_legacy_state(state: int):
-    ...
+async def not_legacy_state(state: int): ...


async def command(cmd: Tuple[str, ...] = Command()) -> Tuple[str, ...]:

@@ -77,8 +76,13 @@ async def regex_matched(regex_matched: Match[str] = RegexMatched()) -> Match[str
    return regex_matched


-async def regex_str(regex_str: str = RegexStr()) -> str:
-    return regex_str
+async def regex_str(
+    entire: str = RegexStr(),
+    type_: str = RegexStr("type"),
+    second: str = RegexStr(2),
+    groups: Tuple[str, ...] = RegexStr(1, "arg"),
+) -> Tuple[str, str, str, Tuple[str, ...]]:
+    return entire, type_, second, groups


async def startswith(startswith: str = Startswith()) -> str:
@@ -19,5 +19,4 @@ async def complex_priority(
    arg: Message = Arg(),
    exception: Optional[Exception] = None,
    default: int = 1,
-):
-    ...
+): ...
@@ -202,8 +202,7 @@ matcher_on_regex = on_regex(
)


-class TestEvent(Event):
-    ...
+class TestEvent(Event): ...


matcher_on_type = on_type(
@@ -99,8 +99,7 @@ async def test_adapter_server(driver: Driver):
    async def handle_http(request: Request):
        return Response(200, content="test")

-    async def handle_ws(ws: WebSocket):
-        ...
+    async def handle_ws(ws: WebSocket): ...

    adapter = FakeAdapter(driver)
152
tests/test_adapters/test_bot.py
Normal file
152
tests/test_adapters/test_bot.py
Normal file
@@ -0,0 +1,152 @@
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import pytest
|
||||
from nonebug import App
|
||||
|
||||
from nonebot.adapters import Bot
|
||||
from nonebot.exception import MockApiException
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_bot_call_api(app: App):
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
ctx.should_call_api("test", {}, True)
|
||||
result = await bot.call_api("test")
|
||||
|
||||
assert result is True
|
||||
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
ctx.should_call_api("test", {}, exception=RuntimeError("test"))
|
||||
with pytest.raises(RuntimeError, match="test"):
|
||||
await bot.call_api("test")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_bot_calling_api_hook_simple(app: App):
|
||||
runned: bool = False
|
||||
|
||||
async def calling_api_hook(bot: Bot, api: str, data: Dict[str, Any]):
|
||||
nonlocal runned
|
||||
runned = True
|
||||
|
||||
hooks = set()
|
||||
|
||||
with pytest.MonkeyPatch.context() as m:
|
||||
m.setattr(Bot, "_calling_api_hook", hooks)
|
||||
|
||||
Bot.on_calling_api(calling_api_hook)
|
||||
|
||||
assert hooks == {calling_api_hook}
|
||||
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
ctx.should_call_api("test", {}, True)
|
||||
result = await bot.call_api("test")
|
||||
|
||||
assert runned is True
|
||||
assert result is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_bot_calling_api_hook_mock(app: App):
|
||||
runned: bool = False
|
||||
|
||||
async def calling_api_hook(bot: Bot, api: str, data: Dict[str, Any]):
|
||||
nonlocal runned
|
||||
runned = True
|
||||
|
||||
raise MockApiException(False)
|
||||
|
||||
hooks = set()
|
||||
|
||||
with pytest.MonkeyPatch.context() as m:
|
||||
m.setattr(Bot, "_calling_api_hook", hooks)
|
||||
|
||||
Bot.on_calling_api(calling_api_hook)
|
||||
|
||||
assert hooks == {calling_api_hook}
|
||||
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
result = await bot.call_api("test")
|
||||
|
||||
assert runned is True
|
||||
assert result is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_bot_called_api_hook_simple(app: App):
|
||||
runned: bool = False
|
||||
|
||||
async def called_api_hook(
|
||||
bot: Bot,
|
||||
exception: Optional[Exception],
|
||||
api: str,
|
||||
data: Dict[str, Any],
|
||||
result: Any,
|
||||
):
|
||||
nonlocal runned
|
||||
runned = True
|
||||
|
||||
hooks = set()
|
||||
|
||||
with pytest.MonkeyPatch.context() as m:
|
||||
m.setattr(Bot, "_called_api_hook", hooks)
|
||||
|
||||
Bot.on_called_api(called_api_hook)
|
||||
|
||||
assert hooks == {called_api_hook}
|
||||
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
ctx.should_call_api("test", {}, True)
|
||||
result = await bot.call_api("test")
|
||||
|
||||
assert runned is True
|
||||
assert result is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_bot_called_api_hook_mock(app: App):
|
||||
runned: bool = False
|
||||
|
||||
async def called_api_hook(
|
||||
bot: Bot,
|
||||
exception: Optional[Exception],
|
||||
api: str,
|
||||
data: Dict[str, Any],
|
||||
result: Any,
|
||||
):
|
||||
nonlocal runned
|
||||
runned = True
|
||||
|
||||
raise MockApiException(False)
|
||||
|
||||
hooks = set()
|
||||
|
||||
with pytest.MonkeyPatch.context() as m:
|
||||
m.setattr(Bot, "_called_api_hook", hooks)
|
||||
|
||||
Bot.on_called_api(called_api_hook)
|
||||
|
||||
assert hooks == {called_api_hook}
|
||||
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
ctx.should_call_api("test", {}, True)
|
||||
result = await bot.call_api("test")
|
||||
|
||||
assert runned is True
|
||||
assert result is False
|
||||
|
||||
runned = False
|
||||
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
ctx.should_call_api("test", {}, exception=RuntimeError("test"))
|
||||
result = await bot.call_api("test")
|
||||
|
||||
assert runned is True
|
||||
assert result is False
|
@@ -1,8 +1,9 @@
|
||||
import pytest
|
||||
from pydantic import ValidationError, parse_obj_as
|
||||
from pydantic import ValidationError
|
||||
|
||||
from nonebot.adapters import Message
|
||||
from nonebot.compat import type_validate_python
|
||||
from utils import FakeMessage, FakeMessageSegment
|
||||
from nonebot.adapters import Message, MessageSegment
|
||||
|
||||
|
||||
def test_segment_data():
|
||||
@@ -47,16 +48,21 @@ def test_segment_add():
|
||||
|
||||
|
||||
def test_segment_validate():
|
||||
assert parse_obj_as(
|
||||
assert type_validate_python(
|
||||
FakeMessageSegment,
|
||||
{"type": "text", "data": {"text": "text"}, "extra": "should be ignored"},
|
||||
) == FakeMessageSegment.text("text")
|
||||
with pytest.raises(ValidationError):
|
||||
type_validate_python(
|
||||
type("FakeMessageSegment2", (MessageSegment,), {}),
|
||||
FakeMessageSegment.text("text"),
|
||||
)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
parse_obj_as(FakeMessageSegment, "some str")
|
||||
type_validate_python(FakeMessageSegment, "some str")
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
parse_obj_as(FakeMessageSegment, {"data": {}})
|
||||
type_validate_python(FakeMessageSegment, {"data": {}})
|
||||
|
||||
|
||||
def test_segment_join():
|
||||
@@ -144,26 +150,26 @@ def test_message_getitem():
|
||||
|
||||
|
||||
def test_message_validate():
|
||||
assert parse_obj_as(FakeMessage, FakeMessage([])) == FakeMessage([])
|
||||
assert type_validate_python(FakeMessage, FakeMessage([])) == FakeMessage([])
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
parse_obj_as(type("FakeMessage2", (Message,), {}), FakeMessage([]))
|
||||
type_validate_python(type("FakeMessage2", (Message,), {}), FakeMessage([]))
|
||||
|
||||
assert parse_obj_as(FakeMessage, "text") == FakeMessage(
|
||||
assert type_validate_python(FakeMessage, "text") == FakeMessage(
|
||||
[FakeMessageSegment.text("text")]
|
||||
)
|
||||
|
||||
assert parse_obj_as(
|
||||
assert type_validate_python(
|
||||
FakeMessage, {"type": "text", "data": {"text": "text"}}
|
||||
) == FakeMessage([FakeMessageSegment.text("text")])
|
||||
|
||||
assert parse_obj_as(
|
||||
assert type_validate_python(
|
||||
FakeMessage,
|
||||
[FakeMessageSegment.text("text"), {"type": "text", "data": {"text": "text"}}],
|
||||
) == FakeMessage([FakeMessageSegment.text("text"), FakeMessageSegment.text("text")])
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
parse_obj_as(FakeMessage, object())
|
||||
type_validate_python(FakeMessage, object())
|
||||
|
||||
|
||||
def test_message_contains():
|
||||
|
@@ -1,3 +1,5 @@
|
||||
import pytest
|
||||
|
||||
from nonebot.adapters import MessageTemplate
|
||||
from utils import FakeMessage, FakeMessageSegment, escape_text
|
||||
|
||||
@@ -15,12 +17,8 @@ def test_template_message():
|
||||
def custom(input: str) -> str:
|
||||
return f"{input}-custom!"
|
||||
|
||||
try:
|
||||
with pytest.raises(ValueError, match="already exists"):
|
||||
template.add_format_spec(custom)
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
raise AssertionError("Should raise ValueError")
|
||||
|
||||
format_args = {
|
||||
"a": "custom",
|
||||
@@ -57,3 +55,22 @@ def test_message_injection():
|
||||
message = template.format(name="[fake:image]")
|
||||
|
||||
assert message.extract_plain_text() == escape_text("[fake:image]Is Bad")
|
||||
|
||||
|
||||
def test_malformed_template():
|
||||
positive_template = FakeMessage.template("{a}{b}")
|
||||
message = positive_template.format(a="a", b="b")
|
||||
assert message.extract_plain_text() == "ab"
|
||||
|
||||
malformed_template = FakeMessage.template("{a.__init__}")
|
||||
with pytest.raises(ValueError, match="private attribute"):
|
||||
message = malformed_template.format(a="a")
|
||||
|
||||
malformed_template = FakeMessage.template("{a[__builtins__]}")
|
||||
with pytest.raises(ValueError, match="private attribute"):
|
||||
message = malformed_template.format(a=globals())
|
||||
|
||||
malformed_template = MessageTemplate(
|
||||
"{a[__builtins__][__import__]}{b.__init__}", private_getattr=True
|
||||
)
|
||||
message = malformed_template.format(a=globals(), b="b")
|
||||
|
68
tests/test_compat.py
Normal file
68
tests/test_compat.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from typing import Any
|
||||
from dataclasses import dataclass
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.compat import (
|
||||
DEFAULT_CONFIG,
|
||||
Required,
|
||||
FieldInfo,
|
||||
PydanticUndefined,
|
||||
model_dump,
|
||||
custom_validation,
|
||||
type_validate_python,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_default_config():
|
||||
assert DEFAULT_CONFIG.get("extra") == "allow"
|
||||
assert DEFAULT_CONFIG.get("arbitrary_types_allowed") is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_field_info():
|
||||
# required should be convert to PydanticUndefined
|
||||
assert FieldInfo(Required).default is PydanticUndefined
|
||||
|
||||
# field info should allow extra attributes
|
||||
assert FieldInfo(test="test").extra["test"] == "test"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_model_dump():
|
||||
class TestModel(BaseModel):
|
||||
test1: int
|
||||
test2: int
|
||||
|
||||
assert model_dump(TestModel(test1=1, test2=2), include={"test1"}) == {"test1": 1}
|
||||
assert model_dump(TestModel(test1=1, test2=2), exclude={"test1"}) == {"test2": 2}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_custom_validation():
|
||||
called = []
|
||||
|
||||
@custom_validation
|
||||
@dataclass
|
||||
class TestModel:
|
||||
test: int
|
||||
|
||||
@classmethod
|
||||
def __get_validators__(cls):
|
||||
yield cls._validate_1
|
||||
yield cls._validate_2
|
||||
|
||||
@classmethod
|
||||
def _validate_1(cls, v: Any) -> Any:
|
||||
called.append(1)
|
||||
return v
|
||||
|
||||
@classmethod
|
||||
def _validate_2(cls, v: Any) -> Any:
|
||||
called.append(2)
|
||||
return cls(test=v["test"])
|
||||
|
||||
assert type_validate_python(TestModel, {"test": 1}) == TestModel(test=1)
|
||||
assert called == [1, 2]
|
118
tests/test_config.py
Normal file
118
tests/test_config.py
Normal file
@@ -0,0 +1,118 @@
|
||||
from typing import List, Union, Optional
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel
|
||||
|
||||
from nonebot.config import DOTENV_TYPE, BaseSettings, SettingsError
|
||||
|
||||
|
||||
class Simple(BaseModel):
|
||||
a: int = 0
|
||||
b: int = 0
|
||||
c: dict = {}
|
||||
complex: list = []
|
||||
|
||||
|
||||
class Example(BaseSettings):
|
||||
_env_file: Optional[DOTENV_TYPE] = ".env", ".env.example"
|
||||
_env_nested_delimiter: Optional[str] = "__"
|
||||
|
||||
simple: str = ""
|
||||
complex: List[int] = [1]
|
||||
complex_none: Optional[List[int]] = None
|
||||
complex_union: Union[int, List[int]] = 1
|
||||
nested: Simple = Simple()
|
||||
nested_inner: Simple = Simple()
|
||||
|
||||
class Config:
|
||||
env_file = ".env", ".env.example"
|
||||
env_nested_delimiter = "__"
|
||||
|
||||
|
||||
class ExampleWithoutDelimiter(Example):
|
||||
class Config:
|
||||
env_nested_delimiter = None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_config_no_env():
|
||||
config = Example(_env_file=None)
|
||||
assert config.simple == ""
|
||||
with pytest.raises(AttributeError):
|
||||
config.common_config
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_config_with_env():
|
||||
config = Example(_env_file=(".env", ".env.example"))
|
||||
assert config.simple == "simple"
|
||||
|
||||
assert config.complex == [1, 2, 3]
|
||||
|
||||
assert config.complex_none is None
|
||||
|
||||
assert config.complex_union == [1, 2, 3]
|
||||
|
||||
assert config.nested.a == 1
|
||||
assert config.nested.b == 2
|
||||
assert config.nested.c == {"c": "3"}
|
||||
assert config.nested.complex == [1, 2, 3]
|
||||
with pytest.raises(AttributeError):
|
||||
config.nested__b
|
||||
with pytest.raises(AttributeError):
|
||||
config.nested__c__c
|
||||
with pytest.raises(AttributeError):
|
||||
config.nested__complex
|
||||
|
||||
assert config.nested_inner.a == 1
|
||||
assert config.nested_inner.b == 2
|
||||
with pytest.raises(AttributeError):
|
||||
config.nested_inner__a
|
||||
with pytest.raises(AttributeError):
|
||||
config.nested_inner__b
|
||||
|
||||
assert config.common_config == "common"
|
||||
|
||||
assert config.other_simple == "simple"
|
||||
|
||||
assert config.other_nested == {"a": 1, "b": 2}
|
||||
with pytest.raises(AttributeError):
|
||||
config.other_nested__b
|
||||
|
||||
assert config.other_nested_inner == {"a": 1, "b": 2}
|
||||
with pytest.raises(AttributeError):
|
||||
config.other_nested_inner__a
|
||||
with pytest.raises(AttributeError):
|
||||
config.other_nested_inner__b
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_config_error_env():
|
||||
with pytest.MonkeyPatch().context() as m:
|
||||
m.setenv("COMPLEX", "not json")
|
||||
|
||||
with pytest.raises(SettingsError):
|
||||
Example(_env_file=(".env", ".env.example"))
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_config_without_delimiter():
|
||||
config = ExampleWithoutDelimiter()
|
||||
assert config.nested.a == 1
|
||||
assert config.nested.b == 0
|
||||
assert config.nested__b == 2
|
||||
assert config.nested.c == {}
|
||||
assert config.nested__c__c == 3
|
||||
assert config.nested.complex == []
|
||||
assert config.nested__complex == [1, 2, 3]
|
||||
|
||||
assert config.nested_inner.a == 0
|
||||
assert config.nested_inner.b == 0
|
||||
|
||||
assert config.other_nested == {"a": 1}
|
||||
assert config.other_nested__b == 2
|
||||
|
||||
with pytest.raises(AttributeError):
|
||||
config.other_nested_inner
|
||||
assert config.other_nested_inner__a == 1
|
||||
assert config.other_nested_inner__b == 2
|
@@ -5,11 +5,11 @@ from typing import Any, Set, Optional
|
||||
import pytest
|
||||
from nonebug import App
|
||||
|
||||
from utils import FakeAdapter
|
||||
from nonebot.adapters import Bot
|
||||
from nonebot.params import Depends
|
||||
from nonebot.dependencies import Dependent
|
||||
from nonebot.exception import WebSocketClosed
|
||||
from nonebot.drivers._lifespan import Lifespan
|
||||
from nonebot.drivers import (
|
||||
URL,
|
||||
Driver,
|
||||
@@ -25,34 +25,50 @@ from nonebot.drivers import (
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_lifespan():
|
||||
lifespan = Lifespan()
|
||||
@pytest.mark.parametrize(
|
||||
"driver", [pytest.param("nonebot.drivers.none:Driver", id="none")], indirect=True
|
||||
)
|
||||
async def test_lifespan(driver: Driver):
|
||||
adapter = FakeAdapter(driver)
|
||||
|
||||
start_log = []
|
||||
ready_log = []
|
||||
shutdown_log = []
|
||||
|
||||
@lifespan.on_startup
|
||||
@driver.on_startup
|
||||
async def _startup1():
|
||||
assert start_log == []
|
||||
start_log.append(1)
|
||||
|
||||
@lifespan.on_startup
|
||||
@driver.on_startup
|
||||
async def _startup2():
|
||||
assert start_log == [1]
|
||||
start_log.append(2)
|
||||
|
||||
@lifespan.on_shutdown
|
||||
@adapter.on_ready
|
||||
def _ready1():
|
||||
assert start_log == [1, 2]
|
||||
assert ready_log == []
|
||||
ready_log.append(1)
|
||||
|
||||
@adapter.on_ready
|
||||
def _ready2():
|
||||
assert ready_log == [1]
|
||||
ready_log.append(2)
|
||||
|
||||
@driver.on_shutdown
|
||||
async def _shutdown1():
|
||||
assert shutdown_log == []
|
||||
shutdown_log.append(1)
|
||||
|
||||
@lifespan.on_shutdown
|
||||
@driver.on_shutdown
|
||||
async def _shutdown2():
|
||||
assert shutdown_log == [1]
|
||||
shutdown_log.append(2)
|
||||
|
||||
async with lifespan:
|
||||
async with driver._lifespan:
|
||||
assert start_log == [1, 2]
|
||||
assert ready_log == [1, 2]
|
||||
|
||||
assert shutdown_log == [1, 2]
|
||||
|
||||
@@ -115,7 +131,7 @@ async def test_websocket_server(app: App, driver: Driver):
|
||||
assert data == b"ping"
|
||||
await ws.send(b"pong")
|
||||
|
||||
with pytest.raises(WebSocketClosed):
|
||||
with pytest.raises(WebSocketClosed, match=r"code=1000"):
|
||||
await ws.receive()
|
||||
|
||||
ws_setup = WebSocketServerSetup(URL("/ws_test"), "ws_test", _handle_ws)
|
||||
@@ -136,7 +152,7 @@ async def test_websocket_server(app: App, driver: Driver):
|
||||
await ws.send_bytes(b"ping")
|
||||
assert await ws.receive_bytes() == b"pong"
|
||||
|
||||
await ws.close()
|
||||
await ws.close(code=1000)
|
||||
|
||||
await asyncio.sleep(1)
|
||||
|
||||
@@ -299,9 +315,29 @@ async def test_http_client(driver: Driver, server_url: URL):
|
||||
],
|
||||
indirect=True,
|
||||
)
|
||||
async def test_websocket_client(driver: Driver):
|
||||
async def test_websocket_client(driver: Driver, server_url: URL):
|
||||
assert isinstance(driver, WebSocketClientMixin)
|
||||
|
||||
request = Request("GET", server_url.with_scheme("ws"))
|
||||
async with driver.websocket(request) as ws:
|
||||
await ws.send("test")
|
||||
assert await ws.receive() == "test"
|
||||
|
||||
await ws.send(b"test")
|
||||
assert await ws.receive() == b"test"
|
||||
|
||||
await ws.send_text("test")
|
||||
assert await ws.receive_text() == "test"
|
||||
|
||||
await ws.send_bytes(b"test")
|
||||
assert await ws.receive_bytes() == b"test"
|
||||
|
||||
await ws.send("quit")
|
||||
with pytest.raises(WebSocketClosed, match=r"code=1000"):
|
||||
await ws.receive()
|
||||
|
||||
await asyncio.sleep(1)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
|
33
tests/test_echo.py
Normal file
33
tests/test_echo.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import pytest
|
||||
from nonebug import App
|
||||
|
||||
from utils import FakeMessage, FakeMessageSegment, make_fake_event
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_echo(app: App):
|
||||
from nonebot.plugins.echo import echo
|
||||
|
||||
async with app.test_matcher(echo) as ctx:
|
||||
bot = ctx.create_bot()
|
||||
|
||||
message = FakeMessage("/echo 123")
|
||||
event = make_fake_event(_message=message)()
|
||||
ctx.receive_event(bot, event)
|
||||
ctx.should_call_send(event, FakeMessage("123"), True, bot=bot)
|
||||
|
||||
message = FakeMessageSegment.text("/echo 123") + FakeMessageSegment.image(
|
||||
"test"
|
||||
)
|
||||
event = make_fake_event(_message=message)()
|
||||
ctx.receive_event(bot, event)
|
||||
ctx.should_call_send(
|
||||
event,
|
||||
FakeMessageSegment.text("123") + FakeMessageSegment.image("test"),
|
||||
True,
|
||||
bot=bot,
|
||||
)
|
||||
|
||||
message = FakeMessage("/echo")
|
||||
event = make_fake_event(_message=message)()
|
||||
ctx.receive_event(bot, event)
|
@@ -4,11 +4,12 @@ from pathlib import Path
|
||||
import pytest
|
||||
from nonebug import App
|
||||
|
||||
from nonebot.rule import Rule
|
||||
from nonebot import get_plugin
|
||||
from nonebot.permission import User
|
||||
from nonebot.matcher import Matcher, matchers
|
||||
from utils import FakeMessage, make_fake_event
|
||||
from nonebot.message import check_and_run_matcher
|
||||
from nonebot.permission import User, Permission
|
||||
from nonebot.message import _check_matcher, check_and_run_matcher
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -40,6 +41,50 @@ async def test_matcher_info(app: App):
|
||||
assert matcher._source.lineno == 3
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_matcher_check(app: App):
|
||||
async def falsy():
|
||||
return False
|
||||
|
||||
async def truthy():
|
||||
return True
|
||||
|
||||
async def error():
|
||||
raise RuntimeError
|
||||
|
||||
event = make_fake_event(_type="test")()
|
||||
with app.provider.context({}):
|
||||
test_perm_falsy = Matcher.new(permission=Permission(falsy))
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
assert await _check_matcher(test_perm_falsy, bot, event, {}) is False
|
||||
|
||||
test_perm_truthy = Matcher.new(permission=Permission(truthy))
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
assert await _check_matcher(test_perm_truthy, bot, event, {}) is True
|
||||
|
||||
test_perm_error = Matcher.new(permission=Permission(error))
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
assert await _check_matcher(test_perm_error, bot, event, {}) is False
|
||||
|
||||
test_rule_falsy = Matcher.new(rule=Rule(falsy))
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
assert await _check_matcher(test_rule_falsy, bot, event, {}) is False
|
||||
|
||||
test_rule_truthy = Matcher.new(rule=Rule(truthy))
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
assert await _check_matcher(test_rule_truthy, bot, event, {}) is True
|
||||
|
||||
test_rule_error = Matcher.new(rule=Rule(error))
|
||||
async with app.test_api() as ctx:
|
||||
bot = ctx.create_bot()
|
||||
assert await _check_matcher(test_rule_error, bot, event, {}) is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_matcher_handle(app: App):
|
||||
from plugins.matcher.matcher_process import test_handle
|
||||
@@ -95,7 +140,7 @@ async def test_matcher_receive(app: App):
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_matcher_(app: App):
|
||||
async def test_matcher_combine(app: App):
|
||||
from plugins.matcher.matcher_process import test_combine
|
||||
|
||||
message = FakeMessage("text")
|
||||
|
@@ -51,6 +51,7 @@ async def test_depend(app: App):
|
||||
sub_type_mismatch,
|
||||
validate_field_fail,
|
||||
annotated_class_depend,
|
||||
annotated_multi_depend,
|
||||
annotated_prior_depend,
|
||||
)
|
||||
|
||||
@@ -81,7 +82,13 @@ async def test_depend(app: App):
|
||||
annotated_prior_depend, allow_types=[DependParam]
|
||||
) as ctx:
|
||||
ctx.should_return(1)
|
||||
assert runned == [1, 1]
|
||||
|
||||
async with app.test_dependent(
|
||||
annotated_multi_depend, allow_types=[DependParam]
|
||||
) as ctx:
|
||||
ctx.should_return(1)
|
||||
|
||||
assert runned == [1, 1, 1]
|
||||
|
||||
async with app.test_dependent(
|
||||
annotated_class_depend, allow_types=[DependParam]
|
||||
@@ -211,7 +218,7 @@ async def test_event(app: App):
|
||||
|
||||
async with app.test_dependent(union_event, allow_types=[EventParam]) as ctx:
|
||||
ctx.pass_params(event=fake_fooevent)
|
||||
ctx.should_return(fake_event)
|
||||
ctx.should_return(fake_fooevent)
|
||||
|
||||
async with app.test_dependent(generic_event, allow_types=[EventParam]) as ctx:
|
||||
ctx.pass_params(event=fake_event)
|
||||
@@ -354,7 +361,9 @@ async def test_state(app: App):
|
||||
regex_str, allow_types=[StateParam, DependParam]
|
||||
) as ctx:
|
||||
ctx.pass_params(state=fake_state)
|
||||
ctx.should_return("[cq:test,arg=value]")
|
||||
ctx.should_return(
|
||||
("[cq:test,arg=value]", "test", "arg=value", ("test", "arg=value"))
|
||||
)
|
||||
|
||||
async with app.test_dependent(
|
||||
regex_group, allow_types=[StateParam, DependParam]
|
||||
@@ -474,6 +483,8 @@ async def test_arg(app: App):
|
||||
annotated_arg,
|
||||
arg_plain_text,
|
||||
annotated_arg_str,
|
||||
annotated_multi_arg,
|
||||
annotated_prior_arg,
|
||||
annotated_arg_plain_text,
|
||||
)
|
||||
|
||||
@@ -507,16 +518,28 @@ async def test_arg(app: App):
|
||||
ctx.pass_params(matcher=matcher)
|
||||
ctx.should_return(message.extract_plain_text())
|
||||
|
||||
async with app.test_dependent(annotated_multi_arg, allow_types=[ArgParam]) as ctx:
|
||||
ctx.pass_params(matcher=matcher)
|
||||
ctx.should_return(message.extract_plain_text())
|
||||
|
||||
async with app.test_dependent(annotated_prior_arg, allow_types=[ArgParam]) as ctx:
|
||||
ctx.pass_params(matcher=matcher)
|
||||
ctx.should_return(message.extract_plain_text())
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_exception(app: App):
|
||||
from plugins.param.param_exception import exc
|
||||
from plugins.param.param_exception import exc, legacy_exc
|
||||
|
||||
exception = ValueError("test")
|
||||
async with app.test_dependent(exc, allow_types=[ExceptionParam]) as ctx:
|
||||
ctx.pass_params(exception=exception)
|
||||
ctx.should_return(exception)
|
||||
|
||||
async with app.test_dependent(legacy_exc, allow_types=[ExceptionParam]) as ctx:
|
||||
ctx.pass_params(exception=exception)
|
||||
ctx.should_return(exception)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_default(app: App):
|
||||
|
Some files were not shown because too many files have changed in this diff.