mirror of
https://github.com/nonebot/nonebot2.git
synced 2025-07-16 02:50:48 +00:00
✨ Feature: Migrate to the structured concurrency framework AnyIO (#3053)
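The diff below swaps pytest-asyncio's @pytest.mark.asyncio marker for AnyIO's @pytest.mark.anyio and adds a session-scoped, parametrized anyio_backend fixture so every async test runs on both asyncio and trio. A minimal sketch of that pattern, assuming AnyIO's pytest plugin is installed (the test name here is illustrative, not from the diff):

import anyio
import pytest

# AnyIO's pytest plugin looks up this fixture to choose the event loop backend.
# Parametrizing it runs every @pytest.mark.anyio test once per backend.
@pytest.fixture(scope="session", params=[pytest.param("asyncio"), pytest.param("trio")])
def anyio_backend(request: pytest.FixtureRequest):
    return request.param

@pytest.mark.anyio
async def test_sleep_runs_on_both_backends():
    await anyio.sleep(0)  # identical code path on asyncio and trio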
@@ -1,8 +1,10 @@
import os
import threading
from pathlib import Path
from typing import TYPE_CHECKING
from functools import wraps
from collections.abc import Generator
from typing_extensions import ParamSpec
from typing import TYPE_CHECKING, TypeVar, Callable

import pytest
from nonebug import NONEBOT_INIT_KWARGS
@@ -20,6 +22,9 @@ os.environ["CONFIG_OVERRIDE"] = "new"
if TYPE_CHECKING:
from nonebot.plugin import Plugin

P = ParamSpec("P")
R = TypeVar("R")

collect_ignore = ["plugins/", "dynamic/", "bad_plugins/"]


@@ -38,14 +43,36 @@ def load_driver(request: pytest.FixtureRequest) -> Driver:
return DriverClass(Env(environment=global_driver.env), global_driver.config)


@pytest.fixture(scope="session", params=[pytest.param("asyncio"), pytest.param("trio")])
def anyio_backend(request: pytest.FixtureRequest):
return request.param


def run_once(func: Callable[P, R]) -> Callable[P, R]:
result = ...

@wraps(func)
def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
nonlocal result
if result is not Ellipsis:
return result

result = func(*args, **kwargs)
return result

return _wrapper


@pytest.fixture(scope="session", autouse=True)
def load_plugin(nonebug_init: None) -> set["Plugin"]:
@run_once
def load_plugin(anyio_backend, nonebug_init: None) -> set["Plugin"]:
# preload global plugins
return nonebot.load_plugins(str(Path(__file__).parent / "plugins"))


@pytest.fixture(scope="session", autouse=True)
def load_builtin_plugin(nonebug_init: None) -> set["Plugin"]:
@run_once
def load_builtin_plugin(anyio_backend, nonebug_init: None) -> set["Plugin"]:
# preload builtin plugins
return nonebot.load_builtin_plugins("echo", "single_session")
@@ -17,7 +17,7 @@ from nonebot.drivers import (
)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_adapter_connect(app: App, driver: Driver):
last_connect_bot: Optional[Bot] = None
last_disconnect_bot: Optional[Bot] = None
@@ -45,7 +45,6 @@ async def test_adapter_connect(app: App, driver: Driver):
assert bot.self_id not in adapter.bots


@pytest.mark.asyncio
@pytest.mark.parametrize(
"driver",
[
@@ -75,7 +74,7 @@ async def test_adapter_connect(app: App, driver: Driver):
],
indirect=True,
)
async def test_adapter_server(driver: Driver):
def test_adapter_server(driver: Driver):
last_http_setup: Optional[HTTPServerSetup] = None
last_ws_setup: Optional[WebSocketServerSetup] = None

@@ -112,7 +111,7 @@ async def test_adapter_server(driver: Driver):
assert last_ws_setup is setup


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -159,7 +158,7 @@ async def test_adapter_http_client(driver: Driver):
assert last_request is request


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -1,5 +1,6 @@
from typing import Any, Optional

import anyio
import pytest
from nonebug import App

@@ -7,7 +8,7 @@ from nonebot.adapters import Bot
from nonebot.exception import MockApiException


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_bot_call_api(app: App):
async with app.test_api() as ctx:
bot = ctx.create_bot()
@@ -23,7 +24,7 @@ async def test_bot_call_api(app: App):
await bot.call_api("test")


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_bot_calling_api_hook_simple(app: App):
runned: bool = False

@@ -49,7 +50,7 @@ async def test_bot_calling_api_hook_simple(app: App):
assert result is True


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_bot_calling_api_hook_mock(app: App):
runned: bool = False

@@ -76,7 +77,47 @@ async def test_bot_calling_api_hook_mock(app: App):
assert result is False


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_bot_calling_api_hook_multi_mock(app: App):
runned1: bool = False
runned2: bool = False
event = anyio.Event()

async def calling_api_hook1(bot: Bot, api: str, data: dict[str, Any]):
nonlocal runned1
runned1 = True
event.set()

raise MockApiException(1)

async def calling_api_hook2(bot: Bot, api: str, data: dict[str, Any]):
nonlocal runned2
runned2 = True
with anyio.fail_after(1):
await event.wait()

raise MockApiException(2)

hooks = set()

with pytest.MonkeyPatch.context() as m:
m.setattr(Bot, "_calling_api_hook", hooks)

Bot.on_calling_api(calling_api_hook1)
Bot.on_calling_api(calling_api_hook2)

assert hooks == {calling_api_hook1, calling_api_hook2}

async with app.test_api() as ctx:
bot = ctx.create_bot()
result = await bot.call_api("test")

assert runned1 is True
assert runned2 is True
assert result == 1


@pytest.mark.anyio
async def test_bot_called_api_hook_simple(app: App):
runned: bool = False

@@ -108,7 +149,7 @@ async def test_bot_called_api_hook_simple(app: App):
assert result is True


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_bot_called_api_hook_mock(app: App):
runned: bool = False

@@ -150,3 +191,56 @@ async def test_bot_called_api_hook_mock(app: App):

assert runned is True
assert result is False


@pytest.mark.anyio
async def test_bot_called_api_hook_multi_mock(app: App):
runned1: bool = False
runned2: bool = False
event = anyio.Event()

async def called_api_hook1(
bot: Bot,
exception: Optional[Exception],
api: str,
data: dict[str, Any],
result: Any,
):
nonlocal runned1
runned1 = True
event.set()

raise MockApiException(1)

async def called_api_hook2(
bot: Bot,
exception: Optional[Exception],
api: str,
data: dict[str, Any],
result: Any,
):
nonlocal runned2
runned2 = True
with anyio.fail_after(1):
await event.wait()

raise MockApiException(2)

hooks = set()

with pytest.MonkeyPatch.context() as m:
m.setattr(Bot, "_called_api_hook", hooks)

Bot.on_called_api(called_api_hook1)
Bot.on_called_api(called_api_hook2)

assert hooks == {called_api_hook1, called_api_hook2}

async with app.test_api() as ctx:
bot = ctx.create_bot()
ctx.should_call_api("test", {}, True)
result = await bot.call_api("test")

assert runned1 is True
assert runned2 is True
assert result == 1
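The multi-hook tests above coordinate two hooks with anyio.Event and bound the wait with anyio.fail_after, the AnyIO counterparts of asyncio.Event and asyncio.wait_for. A standalone sketch of that synchronization pattern, with illustrative task names rather than the hooks from the diff:

import anyio

async def main() -> None:
    event = anyio.Event()

    async def first() -> None:
        event.set()  # signal that the first task has run

    async def second() -> None:
        with anyio.fail_after(1):  # raise TimeoutError if the signal never arrives
            await event.wait()

    async with anyio.create_task_group() as tg:
        tg.start_soon(first)
        tg.start_soon(second)

anyio.run(main)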
@@ -25,7 +25,7 @@ async def _dependency() -> int:
return 1


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_event_preprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
with monkeypatch.context() as m:
m.setattr(message, "_event_preprocessors", set())
@@ -58,7 +58,7 @@ async def test_event_preprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
assert runned, "event_preprocessor should runned"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_event_preprocessor_ignore(app: App, monkeypatch: pytest.MonkeyPatch):
with monkeypatch.context() as m:
m.setattr(message, "_event_preprocessors", set())
@@ -88,7 +88,7 @@ async def test_event_preprocessor_ignore(app: App, monkeypatch: pytest.MonkeyPat
assert not runned, "matcher should not runned"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_event_preprocessor_exception(
app: App, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
):
@@ -132,7 +132,7 @@ async def test_event_preprocessor_exception(
assert "RuntimeError: test" in capsys.readouterr().out


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_event_postprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
with monkeypatch.context() as m:
m.setattr(message, "_event_postprocessors", set())
@@ -165,7 +165,7 @@ async def test_event_postprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
assert runned, "event_postprocessor should runned"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_event_postprocessor_exception(
app: App, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
):
@@ -202,7 +202,7 @@ async def test_event_postprocessor_exception(
assert "RuntimeError: test" in capsys.readouterr().out


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_run_preprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
with monkeypatch.context() as m:
m.setattr(message, "_run_preprocessors", set())
@@ -239,7 +239,7 @@ async def test_run_preprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
assert runned, "run_preprocessor should runned"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_run_preprocessor_ignore(app: App, monkeypatch: pytest.MonkeyPatch):
with monkeypatch.context() as m:
m.setattr(message, "_run_preprocessors", set())
@@ -269,7 +269,7 @@ async def test_run_preprocessor_ignore(app: App, monkeypatch: pytest.MonkeyPatch
assert not runned, "matcher should not runned"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_run_preprocessor_exception(
app: App, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
):
@@ -313,7 +313,7 @@ async def test_run_preprocessor_exception(
assert "RuntimeError: test" in capsys.readouterr().out


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_run_postprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
with monkeypatch.context() as m:
m.setattr(message, "_run_postprocessors", set())
@@ -351,7 +351,7 @@ async def test_run_postprocessor(app: App, monkeypatch: pytest.MonkeyPatch):
assert runned, "run_postprocessor should runned"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_run_postprocessor_exception(
app: App, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
):
@@ -17,14 +17,12 @@ from nonebot.compat import (
)


@pytest.mark.asyncio
async def test_default_config():
def test_default_config():
assert DEFAULT_CONFIG.get("extra") == "allow"
assert DEFAULT_CONFIG.get("arbitrary_types_allowed") is True


@pytest.mark.asyncio
async def test_field_info():
def test_field_info():
# required should be convert to PydanticUndefined
assert FieldInfo(Required).default is PydanticUndefined

@@ -32,8 +30,7 @@ async def test_field_info():
assert FieldInfo(test="test").extra["test"] == "test"


@pytest.mark.asyncio
async def test_type_adapter():
def test_type_adapter():
t = TypeAdapter(Annotated[int, FieldInfo(ge=1)])

assert t.validate_python(2) == 2
@@ -47,8 +44,7 @@ async def test_type_adapter():
t.validate_json("0")


@pytest.mark.asyncio
async def test_model_dump():
def test_model_dump():
class TestModel(BaseModel):
test1: int
test2: int
@@ -57,8 +53,7 @@ async def test_model_dump():
assert model_dump(TestModel(test1=1, test2=2), exclude={"test1"}) == {"test2": 2}


@pytest.mark.asyncio
async def test_custom_validation():
def test_custom_validation():
called = []

@custom_validation
@@ -85,8 +80,7 @@ async def test_custom_validation():
assert called == [1, 2]


@pytest.mark.asyncio
async def test_validate_json():
def test_validate_json():
class TestModel(BaseModel):
test1: int
test2: str
@@ -50,16 +50,14 @@ class ExampleWithoutDelimiter(Example):
env_nested_delimiter = None


@pytest.mark.asyncio
async def test_config_no_env():
def test_config_no_env():
config = Example(_env_file=None)
assert config.simple == ""
with pytest.raises(AttributeError):
config.common_config


@pytest.mark.asyncio
async def test_config_with_env():
def test_config_with_env():
config = Example(_env_file=(".env", ".env.example"))
assert config.simple == "simple"

@@ -102,8 +100,7 @@ async def test_config_with_env():
config.other_nested_inner__b


@pytest.mark.asyncio
async def test_config_error_env():
def test_config_error_env():
with pytest.MonkeyPatch().context() as m:
m.setenv("COMPLEX", "not json")

@@ -111,8 +108,7 @@ async def test_config_error_env():
Example(_env_file=(".env", ".env.example"))


@pytest.mark.asyncio
async def test_config_without_delimiter():
def test_config_without_delimiter():
config = ExampleWithoutDelimiter()
assert config.nested.a == 1
assert config.nested.b == 0
@@ -1,8 +1,8 @@
import json
import asyncio
from typing import Any, Optional
from http.cookies import SimpleCookie

import anyio
import pytest
from nonebug import App

@@ -25,7 +25,7 @@ from nonebot.drivers import (
)


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver", [pytest.param("nonebot.drivers.none:Driver", id="none")], indirect=True
)
@@ -59,22 +59,22 @@ async def test_lifespan(driver: Driver):

@driver.on_shutdown
async def _shutdown1():
assert shutdown_log == []
assert shutdown_log == [2]
shutdown_log.append(1)

@driver.on_shutdown
async def _shutdown2():
assert shutdown_log == [1]
assert shutdown_log == []
shutdown_log.append(2)

async with driver._lifespan:
assert start_log == [1, 2]
assert ready_log == [1, 2]

assert shutdown_log == [1, 2]
assert shutdown_log == [2, 1]


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -99,10 +99,10 @@ async def test_http_server(app: App, driver: Driver):
assert response.status_code == 200
assert response.text == "test"

await asyncio.sleep(1)
await anyio.sleep(1)


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -155,10 +155,10 @@ async def test_websocket_server(app: App, driver: Driver):

await ws.close(code=1000)

await asyncio.sleep(1)
await anyio.sleep(1)


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -171,9 +171,10 @@ async def test_cross_context(app: App, driver: Driver):
assert isinstance(driver, ASGIMixin)

ws: Optional[WebSocket] = None
ws_ready = asyncio.Event()
ws_should_close = asyncio.Event()
ws_ready = anyio.Event()
ws_should_close = anyio.Event()

# create a background task before the ws connection established
async def background_task():
try:
await ws_ready.wait()
@@ -185,8 +186,6 @@ async def test_cross_context(app: App, driver: Driver):
finally:
ws_should_close.set()

task = asyncio.create_task(background_task())

async def _handle_ws(websocket: WebSocket) -> None:
nonlocal ws
await websocket.accept()
@@ -199,7 +198,9 @@ async def test_cross_context(app: App, driver: Driver):
ws_setup = WebSocketServerSetup(URL("/ws_test"), "ws_test", _handle_ws)
driver.setup_websocket_server(ws_setup)

async with app.test_server(driver.asgi) as ctx:
async with anyio.create_task_group() as tg, app.test_server(driver.asgi) as ctx:
tg.start_soon(background_task)

client = ctx.get_client()

async with client.websocket_connect("/ws_test") as websocket:
@@ -211,11 +212,10 @@ async def test_cross_context(app: App, driver: Driver):
if not e.args or "websocket.close" not in str(e.args[0]):
raise

await task
await asyncio.sleep(1)
await anyio.sleep(1)


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -304,10 +304,10 @@ async def test_http_client(driver: Driver, server_url: URL):
"test3": "test",
}, "file parsing error"

await asyncio.sleep(1)
await anyio.sleep(1)


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -419,10 +419,10 @@ async def test_http_client_session(driver: Driver, server_url: URL):
"test3": "test",
}, "file parsing error"

await asyncio.sleep(1)
await anyio.sleep(1)


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
"driver",
[
@@ -452,10 +452,9 @@ async def test_websocket_client(driver: Driver, server_url: URL):
with pytest.raises(WebSocketClosed, match=r"code=1000"):
await ws.receive()

await asyncio.sleep(1)
await anyio.sleep(1)


@pytest.mark.asyncio
@pytest.mark.parametrize(
("driver", "driver_type"),
[
@@ -472,11 +471,11 @@ async def test_websocket_client(driver: Driver, server_url: URL):
],
indirect=["driver"],
)
async def test_combine_driver(driver: Driver, driver_type: str):
def test_combine_driver(driver: Driver, driver_type: str):
assert driver.type == driver_type


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_bot_connect_hook(app: App, driver: Driver):
with pytest.MonkeyPatch.context() as m:
conn_hooks: set[Dependent[Any]] = set()
@@ -533,7 +532,7 @@ async def test_bot_connect_hook(app: App, driver: Driver):
async with app.test_api() as ctx:
bot = ctx.create_bot()

await asyncio.sleep(1)
await anyio.sleep(1)

if not conn_should_be_called:
pytest.fail("on_bot_connect hook not called")
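In the driver tests above, the free-floating asyncio.create_task plus a manual await task is replaced by an AnyIO task group that owns the background task for the duration of the test. A reduced sketch of that structural change, with a placeholder body instead of the test's websocket bookkeeping:

import anyio

async def background_task() -> None:
    await anyio.sleep(0.1)  # placeholder for the websocket coordination in the test

async def run_scenario() -> None:
    # Before: task = asyncio.create_task(background_task()); ...; await task
    # After: the task group starts the task, and leaving the block both waits
    # for it and propagates any exception it raised.
    async with anyio.create_task_group() as tg:
        tg.start_soon(background_task)
        # ... drive the client/server interaction here ...

anyio.run(run_scenario)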
@@ -4,7 +4,7 @@ from nonebug import App
from utils import FakeMessage, FakeMessageSegment, make_fake_event


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_echo(app: App):
from nonebot.plugins.echo import echo
@@ -14,8 +14,7 @@ from nonebot import (
)


@pytest.mark.asyncio
async def test_init():
def test_init():
env = nonebot.get_driver().env
assert env == "test"

@@ -35,31 +34,28 @@ async def test_init():
assert config.not_nested == "some string"


@pytest.mark.asyncio
async def test_get_driver(app: App, monkeypatch: pytest.MonkeyPatch):
def test_get_driver(monkeypatch: pytest.MonkeyPatch):
with monkeypatch.context() as m:
m.setattr(nonebot, "_driver", None)
with pytest.raises(ValueError, match="initialized"):
get_driver()


@pytest.mark.asyncio
async def test_get_asgi(app: App, monkeypatch: pytest.MonkeyPatch):
def test_get_asgi():
driver = get_driver()
assert isinstance(driver, ReverseDriver)
assert isinstance(driver, ASGIMixin)
assert get_asgi() == driver.asgi


@pytest.mark.asyncio
async def test_get_app(app: App, monkeypatch: pytest.MonkeyPatch):
def test_get_app():
driver = get_driver()
assert isinstance(driver, ReverseDriver)
assert isinstance(driver, ASGIMixin)
assert get_app() == driver.server_app


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_get_adapter(app: App, monkeypatch: pytest.MonkeyPatch):
async with app.test_api() as ctx:
adapter = ctx.create_adapter()
@@ -74,8 +70,7 @@ async def test_get_adapter(app: App, monkeypatch: pytest.MonkeyPatch):
get_adapter("not exist")


@pytest.mark.asyncio
async def test_run(app: App, monkeypatch: pytest.MonkeyPatch):
def test_run(monkeypatch: pytest.MonkeyPatch):
runned = False

def mock_run(*args, **kwargs):
@@ -93,8 +88,7 @@ async def test_run(app: App, monkeypatch: pytest.MonkeyPatch):
assert runned


@pytest.mark.asyncio
async def test_get_bot(app: App, monkeypatch: pytest.MonkeyPatch):
def test_get_bot(app: App, monkeypatch: pytest.MonkeyPatch):
driver = get_driver()

with pytest.raises(ValueError, match="no bots"):
@@ -12,8 +12,7 @@ from nonebot.permission import User, Permission
from nonebot.message import _check_matcher, check_and_run_matcher


@pytest.mark.asyncio
async def test_matcher_info(app: App):
def test_matcher_info(app: App):
from plugins.matcher.matcher_info import matcher

assert issubclass(matcher, Matcher)
@@ -43,7 +42,7 @@ async def test_matcher_info(app: App):
assert matcher._source.lineno == 3


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_check(app: App):
async def falsy():
return False
@@ -87,7 +86,7 @@ async def test_matcher_check(app: App):
assert await _check_matcher(test_rule_error, bot, event, {}) is False


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_handle(app: App):
from plugins.matcher.matcher_process import test_handle

@@ -102,7 +101,7 @@ async def test_matcher_handle(app: App):
ctx.should_finished()


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_got(app: App):
from plugins.matcher.matcher_process import test_got

@@ -124,7 +123,7 @@ async def test_matcher_got(app: App):
ctx.receive_event(bot, event_next)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_receive(app: App):
from plugins.matcher.matcher_process import test_receive

@@ -141,7 +140,7 @@ async def test_matcher_receive(app: App):
ctx.should_paused()


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_combine(app: App):
from plugins.matcher.matcher_process import test_combine

@@ -164,7 +163,7 @@ async def test_matcher_combine(app: App):
ctx.receive_event(bot, event_next)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_preset(app: App):
from plugins.matcher.matcher_process import test_preset

@@ -182,7 +181,7 @@ async def test_matcher_preset(app: App):
ctx.receive_event(bot, event_next)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_overload(app: App):
from plugins.matcher.matcher_process import test_overload

@@ -196,7 +195,7 @@ async def test_matcher_overload(app: App):
ctx.should_finished()


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_destroy(app: App):
from plugins.matcher.matcher_process import test_destroy

@@ -210,7 +209,7 @@ async def test_matcher_destroy(app: App):
assert len(matchers[test_destroy.priority]) == 0


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_type_updater(app: App):
from plugins.matcher.matcher_type import test_type_updater, test_custom_updater

@@ -231,7 +230,7 @@ async def test_type_updater(app: App):
assert new_type == "custom"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_default_permission_updater(app: App):
from plugins.matcher.matcher_permission import (
default_permission,
@@ -252,7 +251,7 @@ async def test_default_permission_updater(app: App):
assert checker.perm is default_permission


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_user_permission_updater(app: App):
from plugins.matcher.matcher_permission import (
default_permission,
@@ -274,7 +273,7 @@ async def test_user_permission_updater(app: App):
assert checker.perm is default_permission


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_custom_permission_updater(app: App):
from plugins.matcher.matcher_permission import (
new_permission,
@@ -291,7 +290,7 @@ async def test_custom_permission_updater(app: App):
assert new_perm is new_permission


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_run(app: App):
with app.provider.context({}):
assert not matchers
@@ -322,37 +321,46 @@ async def test_run(app: App):
assert len(matchers[0][0].handlers) == 0


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_temp(app: App):
from plugins.matcher.matcher_expire import test_temp_matcher

event = make_fake_event(_type="test")()
async with app.test_api() as ctx:
bot = ctx.create_bot()
assert test_temp_matcher in matchers[test_temp_matcher.priority]
await check_and_run_matcher(test_temp_matcher, bot, event, {})
assert test_temp_matcher not in matchers[test_temp_matcher.priority]
with app.provider.context({test_temp_matcher.priority: [test_temp_matcher]}):
async with app.test_api() as ctx:
bot = ctx.create_bot()
assert test_temp_matcher in matchers[test_temp_matcher.priority]
await check_and_run_matcher(test_temp_matcher, bot, event, {})
assert test_temp_matcher not in matchers[test_temp_matcher.priority]


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_datetime_expire(app: App):
from plugins.matcher.matcher_expire import test_datetime_matcher

event = make_fake_event()()
async with app.test_api() as ctx:
bot = ctx.create_bot()
assert test_datetime_matcher in matchers[test_datetime_matcher.priority]
await check_and_run_matcher(test_datetime_matcher, bot, event, {})
assert test_datetime_matcher not in matchers[test_datetime_matcher.priority]
with app.provider.context(
{test_datetime_matcher.priority: [test_datetime_matcher]}
):
async with app.test_matcher(test_datetime_matcher) as ctx:
bot = ctx.create_bot()
assert test_datetime_matcher in matchers[test_datetime_matcher.priority]
await check_and_run_matcher(test_datetime_matcher, bot, event, {})
assert test_datetime_matcher not in matchers[test_datetime_matcher.priority]


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_timedelta_expire(app: App):
from plugins.matcher.matcher_expire import test_timedelta_matcher

event = make_fake_event()()
async with app.test_api() as ctx:
bot = ctx.create_bot()
assert test_timedelta_matcher in matchers[test_timedelta_matcher.priority]
await check_and_run_matcher(test_timedelta_matcher, bot, event, {})
assert test_timedelta_matcher not in matchers[test_timedelta_matcher.priority]
with app.provider.context(
{test_timedelta_matcher.priority: [test_timedelta_matcher]}
):
async with app.test_api() as ctx:
bot = ctx.create_bot()
assert test_timedelta_matcher in matchers[test_timedelta_matcher.priority]
await check_and_run_matcher(test_timedelta_matcher, bot, event, {})
assert (
test_timedelta_matcher not in matchers[test_timedelta_matcher.priority]
)
@@ -1,11 +1,9 @@
import pytest
from nonebug import App

from nonebot.matcher import DEFAULT_PROVIDER_CLASS, matchers


@pytest.mark.asyncio
async def test_manager(app: App):
def test_manager(app: App):
try:
default_provider = matchers.provider
matchers.set_provider(DEFAULT_PROVIDER_CLASS)
@@ -2,6 +2,7 @@ import re

import pytest
from nonebug import App
from exceptiongroup import BaseExceptionGroup

from nonebot.matcher import Matcher
from nonebot.dependencies import Dependent
@@ -36,7 +37,7 @@ from nonebot.consts import (
UNKNOWN_PARAM = "Unknown parameter"


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_depend(app: App):
from plugins.param.param_depend import (
ClassDependency,
@@ -90,36 +91,47 @@ async def test_depend(app: App):

assert runned == [1, 1, 1]

runned.clear()

async with app.test_dependent(
annotated_class_depend, allow_types=[DependParam]
) as ctx:
ctx.should_return(ClassDependency(x=1, y=2))

with pytest.raises(TypeMisMatch): # noqa: PT012
with pytest.raises((TypeMisMatch, BaseExceptionGroup)) as exc_info: # noqa: PT012
async with app.test_dependent(
sub_type_mismatch, allow_types=[DependParam, BotParam]
) as ctx:
bot = ctx.create_bot()
ctx.pass_params(bot=bot)

if isinstance(exc_info.value, BaseExceptionGroup):
assert exc_info.group_contains(TypeMisMatch)

async with app.test_dependent(validate, allow_types=[DependParam]) as ctx:
ctx.should_return(1)

with pytest.raises(TypeMisMatch):
with pytest.raises((TypeMisMatch, BaseExceptionGroup)) as exc_info:
async with app.test_dependent(validate_fail, allow_types=[DependParam]) as ctx:
...

if isinstance(exc_info.value, BaseExceptionGroup):
assert exc_info.group_contains(TypeMisMatch)

async with app.test_dependent(validate_field, allow_types=[DependParam]) as ctx:
ctx.should_return(1)

with pytest.raises(TypeMisMatch):
with pytest.raises((TypeMisMatch, BaseExceptionGroup)) as exc_info:
async with app.test_dependent(
validate_field_fail, allow_types=[DependParam]
) as ctx:
...

if isinstance(exc_info.value, BaseExceptionGroup):
assert exc_info.group_contains(TypeMisMatch)

@pytest.mark.asyncio

@pytest.mark.anyio
async def test_bot(app: App):
from plugins.param.param_bot import (
FooBot,
@@ -157,11 +169,14 @@ async def test_bot(app: App):
ctx.pass_params(bot=bot)
ctx.should_return(bot)

with pytest.raises(TypeMisMatch): # noqa: PT012
with pytest.raises((TypeMisMatch, BaseExceptionGroup)) as exc_info: # noqa: PT012
async with app.test_dependent(sub_bot, allow_types=[BotParam]) as ctx:
bot = ctx.create_bot()
ctx.pass_params(bot=bot)

if isinstance(exc_info.value, BaseExceptionGroup):
assert exc_info.group_contains(TypeMisMatch)

async with app.test_dependent(union_bot, allow_types=[BotParam]) as ctx:
bot = ctx.create_bot(base=FooBot)
ctx.pass_params(bot=bot)
@@ -181,7 +196,7 @@ async def test_bot(app: App):
app.test_dependent(not_bot, allow_types=[BotParam])


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_event(app: App):
from plugins.param.param_event import (
FooEvent,
@@ -223,10 +238,13 @@ async def test_event(app: App):
ctx.pass_params(event=fake_fooevent)
ctx.should_return(fake_fooevent)

with pytest.raises(TypeMisMatch):
with pytest.raises((TypeMisMatch, BaseExceptionGroup)) as exc_info:
async with app.test_dependent(sub_event, allow_types=[EventParam]) as ctx:
ctx.pass_params(event=fake_event)

if isinstance(exc_info.value, BaseExceptionGroup):
assert exc_info.group_contains(TypeMisMatch)

async with app.test_dependent(union_event, allow_types=[EventParam]) as ctx:
ctx.pass_params(event=fake_fooevent)
ctx.should_return(fake_fooevent)
@@ -267,7 +285,7 @@ async def test_event(app: App):
ctx.should_return(fake_event.is_tome())


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_state(app: App):
from plugins.param.param_state import (
state,
@@ -418,7 +436,7 @@ async def test_state(app: App):
ctx.should_return(fake_state[KEYWORD_KEY])


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher(app: App):
from plugins.param.param_matcher import (
FooMatcher,
@@ -457,10 +475,13 @@ async def test_matcher(app: App):
ctx.pass_params(matcher=foo_matcher)
ctx.should_return(foo_matcher)

with pytest.raises(TypeMisMatch):
with pytest.raises((TypeMisMatch, BaseExceptionGroup)) as exc_info:
async with app.test_dependent(sub_matcher, allow_types=[MatcherParam]) as ctx:
ctx.pass_params(matcher=fake_matcher)

if isinstance(exc_info.value, BaseExceptionGroup):
assert exc_info.group_contains(TypeMisMatch)

async with app.test_dependent(union_matcher, allow_types=[MatcherParam]) as ctx:
ctx.pass_params(matcher=foo_matcher)
ctx.should_return(foo_matcher)
@@ -496,7 +517,7 @@ async def test_matcher(app: App):
ctx.should_return(event_next)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_arg(app: App):
from plugins.param.param_arg import (
arg,
@@ -548,7 +569,7 @@ async def test_arg(app: App):
ctx.should_return(message.extract_plain_text())


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_exception(app: App):
from plugins.param.param_exception import exc, legacy_exc

@@ -562,7 +583,7 @@ async def test_exception(app: App):
ctx.should_return(exception)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_default(app: App):
from plugins.param.param_default import default

@@ -570,8 +591,7 @@ async def test_default(app: App):
ctx.should_return(1)


@pytest.mark.asyncio
async def test_priority():
def test_priority():
from plugins.param.priority import complex_priority

dependent = Dependent[None].parse(
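Because exceptions raised inside AnyIO task groups can surface wrapped in a BaseExceptionGroup, the param tests above accept either a bare TypeMisMatch or a group containing it, then check the group with pytest's group_contains helper. A reduced, self-contained sketch of that assertion pattern; the stand-in exception class and function names here are illustrative, not the library's:

import pytest
from exceptiongroup import BaseExceptionGroup  # backport; built in on Python 3.11+

class TypeMisMatch(Exception):  # stand-in for nonebot.exception.TypeMisMatch
    ...

def raises_in_group() -> None:
    raise BaseExceptionGroup("wrapped", [TypeMisMatch()])

def test_accepts_bare_or_grouped() -> None:
    with pytest.raises((TypeMisMatch, BaseExceptionGroup)) as exc_info:
        raises_in_group()
    # If the exception arrived wrapped, assert the group contains the real one.
    if isinstance(exc_info.value, BaseExceptionGroup):
        assert exc_info.group_contains(TypeMisMatch)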
@@ -22,7 +22,7 @@ from nonebot.permission import (
)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_permission(app: App):
async def falsy():
return False
@@ -54,7 +54,7 @@ async def test_permission(app: App):
assert await Permission(truthy, skipped)(bot, event) is True


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(("type", "expected"), [("message", True), ("notice", False)])
async def test_message(type: str, expected: bool):
dependent = next(iter(MESSAGE.checkers))
@@ -66,7 +66,7 @@ async def test_message(type: str, expected: bool):
assert await dependent(event=event) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(("type", "expected"), [("message", False), ("notice", True)])
async def test_notice(type: str, expected: bool):
dependent = next(iter(NOTICE.checkers))
@@ -78,7 +78,7 @@ async def test_notice(type: str, expected: bool):
assert await dependent(event=event) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(("type", "expected"), [("message", False), ("request", True)])
async def test_request(type: str, expected: bool):
dependent = next(iter(REQUEST.checkers))
@@ -90,7 +90,7 @@ async def test_request(type: str, expected: bool):
assert await dependent(event=event) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("type", "expected"), [("message", False), ("meta_event", True)]
)
@@ -104,7 +104,7 @@ async def test_metaevent(type: str, expected: bool):
assert await dependent(event=event) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("type", "user_id", "expected"),
[
@@ -128,7 +128,7 @@ async def test_superuser(app: App, type: str, user_id: str, expected: bool):
assert await dependent(bot=bot, event=event) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("session_ids", "session_id", "expected"),
[
@@ -1,12 +1,10 @@
import pytest
from pydantic import BaseModel

import nonebot
from nonebot.plugin import PluginManager, _managers


@pytest.mark.asyncio
async def test_get_plugin():
def test_get_plugin():
# check simple plugin
plugin = nonebot.get_plugin("export")
assert plugin
@@ -22,8 +20,7 @@ async def test_get_plugin():
assert plugin.module_name == "plugins.nested.plugins.nested_subplugin"


@pytest.mark.asyncio
async def test_get_plugin_by_module_name():
def test_get_plugin_by_module_name():
# check get plugin by exact module name
plugin = nonebot.get_plugin_by_module_name("plugins.nested")
assert plugin
@@ -48,8 +45,7 @@ async def test_get_plugin_by_module_name():
assert plugin.module_name == "plugins.nested.plugins.nested_subplugin"


@pytest.mark.asyncio
async def test_get_available_plugin():
def test_get_available_plugin():
old_managers = _managers.copy()
_managers.clear()
try:
@@ -63,8 +59,7 @@ async def test_get_available_plugin():
_managers.extend(old_managers)


@pytest.mark.asyncio
async def test_get_plugin_config():
def test_get_plugin_config():
class Config(BaseModel):
plugin_config: int
@@ -1,15 +1,44 @@
import sys
from pathlib import Path
from functools import wraps
from dataclasses import asdict
from typing import TypeVar, Callable
from typing_extensions import ParamSpec

import pytest

import nonebot
from nonebot.plugin import Plugin, PluginManager, _managers, inherit_supported_adapters
from nonebot.plugin import (
Plugin,
PluginManager,
_plugins,
_managers,
inherit_supported_adapters,
)

P = ParamSpec("P")
R = TypeVar("R")


@pytest.mark.asyncio
async def test_load_plugin():
def _recover(func: Callable[P, R]) -> Callable[P, R]:

@wraps(func)
def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
origin_managers = _managers.copy()
origin_plugins = _plugins.copy()
try:
return func(*args, **kwargs)
finally:
_managers.clear()
_managers.extend(origin_managers)
_plugins.clear()
_plugins.update(origin_plugins)

return _wrapper


@_recover
def test_load_plugin():
# check regular
assert nonebot.load_plugin("dynamic.simple")

@@ -20,8 +49,7 @@ async def test_load_plugin():
assert nonebot.load_plugin("some_plugin_not_exist") is None


@pytest.mark.asyncio
async def test_load_plugins(load_plugin: set[Plugin], load_builtin_plugin: set[Plugin]):
def test_load_plugins(load_plugin: set[Plugin], load_builtin_plugin: set[Plugin]):
loaded_plugins = {
plugin for plugin in nonebot.get_loaded_plugins() if not plugin.parent_plugin
}
@@ -44,8 +72,7 @@ async def test_load_plugins(load_plugin: set[Plugin], load_builtin_plugin: set[P
PluginManager(search_path=["plugins"]).load_all_plugins()


@pytest.mark.asyncio
async def test_load_nested_plugin():
def test_load_nested_plugin():
parent_plugin = nonebot.get_plugin("nested")
sub_plugin = nonebot.get_plugin("nested:nested_subplugin")
sub_plugin2 = nonebot.get_plugin("nested:nested_subplugin2")
@@ -57,16 +84,16 @@ async def test_load_nested_plugin():
assert parent_plugin.sub_plugins == {sub_plugin, sub_plugin2}


@pytest.mark.asyncio
async def test_load_json():
@_recover
def test_load_json():
nonebot.load_from_json("./plugins.json")

with pytest.raises(TypeError):
nonebot.load_from_json("./plugins.invalid.json")


@pytest.mark.asyncio
async def test_load_toml():
@_recover
def test_load_toml():
nonebot.load_from_toml("./plugins.toml")

with pytest.raises(ValueError, match="Cannot find"):
@@ -76,52 +103,54 @@ async def test_load_toml():
nonebot.load_from_toml("./plugins.invalid.toml")


@pytest.mark.asyncio
async def test_bad_plugin():
@_recover
def test_bad_plugin():
nonebot.load_plugins("bad_plugins")

assert nonebot.get_plugin("bad_plugin") is None


@pytest.mark.asyncio
async def test_require_loaded(monkeypatch: pytest.MonkeyPatch):
@_recover
def test_require_loaded(monkeypatch: pytest.MonkeyPatch):
def _patched_find(name: str):
pytest.fail("require existing plugin should not call find_manager_by_name")

monkeypatch.setattr("nonebot.plugin.load._find_manager_by_name", _patched_find)
with monkeypatch.context() as m:
m.setattr("nonebot.plugin.load._find_manager_by_name", _patched_find)

# require use module name
nonebot.require("plugins.export")
# require use plugin id
nonebot.require("export")
nonebot.require("nested:nested_subplugin")
# require use module name
nonebot.require("plugins.export")
# require use plugin id
nonebot.require("export")
nonebot.require("nested:nested_subplugin")


@pytest.mark.asyncio
async def test_require_not_loaded(monkeypatch: pytest.MonkeyPatch):
m = PluginManager(["dynamic.require_not_loaded"], ["dynamic/require_not_loaded/"])
_managers.append(m)
@_recover
def test_require_not_loaded(monkeypatch: pytest.MonkeyPatch):
pm = PluginManager(["dynamic.require_not_loaded"], ["dynamic/require_not_loaded/"])
_managers.append(pm)
num_managers = len(_managers)

origin_load = PluginManager.load_plugin

def _patched_load(self: PluginManager, name: str):
assert self is m
assert self is pm
return origin_load(self, name)

monkeypatch.setattr(PluginManager, "load_plugin", _patched_load)
with monkeypatch.context() as m:
m.setattr(PluginManager, "load_plugin", _patched_load)

# require standalone plugin
nonebot.require("dynamic.require_not_loaded")
# require searched plugin
nonebot.require("dynamic.require_not_loaded.subplugin1")
nonebot.require("require_not_loaded:subplugin2")
# require standalone plugin
nonebot.require("dynamic.require_not_loaded")
# require searched plugin
nonebot.require("dynamic.require_not_loaded.subplugin1")
nonebot.require("require_not_loaded:subplugin2")

assert len(_managers) == num_managers


@pytest.mark.asyncio
async def test_require_not_declared():
@_recover
def test_require_not_declared():
num_managers = len(_managers)

nonebot.require("dynamic.require_not_declared")
@@ -130,14 +159,13 @@ async def test_require_not_declared():
assert _managers[-1].plugins == {"dynamic.require_not_declared"}


@pytest.mark.asyncio
async def test_require_not_found():
@_recover
def test_require_not_found():
with pytest.raises(RuntimeError):
nonebot.require("some_plugin_not_exist")


@pytest.mark.asyncio
async def test_plugin_metadata():
def test_plugin_metadata():
from plugins.metadata import Config, FakeAdapter

plugin = nonebot.get_plugin("metadata")
@@ -157,8 +185,7 @@ async def test_plugin_metadata():
assert plugin.metadata.get_supported_adapters() == {FakeAdapter}


@pytest.mark.asyncio
async def test_inherit_supported_adapters_not_found():
def test_inherit_supported_adapters_not_found():
with pytest.raises(RuntimeError):
inherit_supported_adapters("some_plugin_not_exist")

@@ -166,7 +193,6 @@ async def test_inherit_supported_adapters_not_found():
inherit_supported_adapters("export")


@pytest.mark.asyncio
@pytest.mark.parametrize(
("inherit_plugins", "expected"),
[
@@ -233,7 +259,7 @@ async def test_inherit_supported_adapters_not_found():
),
],
)
async def test_inherit_supported_adapters_combine(
def test_inherit_supported_adapters_combine(
inherit_plugins: tuple[str], expected: set[str]
):
assert inherit_supported_adapters(*inherit_plugins) == expected
@@ -1,17 +1,17 @@
import pytest

from nonebot.plugin import PluginManager, _managers


@pytest.mark.asyncio
async def test_load_plugin_name():
def test_load_plugin_name():
m = PluginManager(plugins=["dynamic.manager"])
_managers.append(m)
try:
_managers.append(m)

# load by plugin id
module1 = m.load_plugin("manager")
# load by module name
module2 = m.load_plugin("dynamic.manager")
assert module1
assert module2
assert module1 is module2
# load by plugin id
module1 = m.load_plugin("manager")
# load by module name
module2 = m.load_plugin("dynamic.manager")
assert module1
assert module2
assert module1 is module2
finally:
_managers.remove(m)
@@ -18,7 +18,6 @@ from nonebot.rule import (
)


@pytest.mark.asyncio
@pytest.mark.parametrize(
("matcher_name", "pre_rule_factory", "has_permission"),
[
@@ -102,7 +101,7 @@ from nonebot.rule import (
pytest.param("matcher_group_on_type", lambda e: IsTypeRule(e), True),
],
)
async def test_on(
def test_on(
matcher_name: str,
pre_rule_factory: Optional[Callable[[type[Event]], T_RuleChecker]],
has_permission: bool,
@@ -150,8 +149,7 @@ async def test_on(
assert matcher.module_name == "plugins.plugin.matchers"


@pytest.mark.asyncio
async def test_runtime_on():
def test_runtime_on():
import plugins.plugin.matchers as module
from plugins.plugin.matchers import matcher_on_factory
@@ -49,7 +49,7 @@ from nonebot.rule import (
)


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_rule(app: App):
async def falsy():
return False
@@ -81,7 +81,7 @@ async def test_rule(app: App):
assert await Rule(truthy, skipped)(bot, event, {}) is False


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_trie(app: App):
TrieRule.add_prefix("/fake-prefix", TRIE_VALUE("/", ("fake-prefix",)))

@@ -146,7 +146,7 @@ async def test_trie(app: App):
del TrieRule.prefix["/fake-prefix"]


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("msg", "ignorecase", "type", "text", "expected"),
[
@@ -186,7 +186,7 @@ async def test_startswith(
assert await dependent(event=event, state=state) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("msg", "ignorecase", "type", "text", "expected"),
[
@@ -226,7 +226,7 @@ async def test_endswith(
assert await dependent(event=event, state=state) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("msg", "ignorecase", "type", "text", "expected"),
[
@@ -266,7 +266,7 @@ async def test_fullmatch(
assert await dependent(event=event, state=state) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("kws", "type", "text", "expected"),
[
@@ -298,7 +298,7 @@ async def test_keyword(
assert await dependent(event=event, state=state) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("cmds", "force_whitespace", "cmd", "whitespace", "arg_text", "expected"),
[
@@ -344,7 +344,7 @@ async def test_command(
assert await dependent(state=state) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_shell_command():
state: T_State
CMD = ("test",)
@@ -451,7 +451,7 @@ async def test_shell_command():
assert state[SHELL_ARGS].status != 0


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize(
("pattern", "type", "text", "expected", "matched"),
[
@@ -494,7 +494,7 @@ async def test_regex(
assert result.span() == matched.span()


@pytest.mark.asyncio
@pytest.mark.anyio
@pytest.mark.parametrize("expected", [True, False])
async def test_to_me(expected: bool):
test_to_me = to_me()
@@ -507,7 +507,7 @@ async def test_to_me(expected: bool):
assert await dependent(event=event) == expected


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_is_type():
Event1 = make_fake_event()
Event2 = make_fake_event()
@@ -5,7 +5,7 @@ import pytest
from utils import make_fake_event


@pytest.mark.asyncio
@pytest.mark.anyio
async def test_matcher_mutex():
from nonebot.plugins.single_session import matcher_mutex, _running_matcher