Separate magicocacroterline

2024-10-13 02:51:33 +08:00
parent a77f97fd4b
commit db385f597b
91 changed files with 3681 additions and 3117 deletions

View File

@ -8,18 +8,21 @@ import threading
import time
from typing import Any, Optional
from liteyuki.bot.lifespan import (LIFESPAN_FUNC, Lifespan, PROCESS_LIFESPAN_FUNC)
from liteyuki.bot.lifespan import LIFESPAN_FUNC, Lifespan, PROCESS_LIFESPAN_FUNC
from liteyuki.comm.channel import get_channel
from liteyuki.core.manager import ProcessManager
from liteyuki.log import init_log, logger
from liteyuki.plugin import load_plugin
from liteyuki.utils import IS_MAIN_PROCESS
# new version
from liteyuki.core.manager import sub_process_manager
__all__ = [
"LiteyukiBot",
"get_bot",
"get_config",
"get_config_with_compat",
"LiteyukiBot",
"get_bot",
"get_config",
"get_config_with_compat",
]
@ -60,6 +63,7 @@ class LiteyukiBot:
Startup logic
"""
await self.lifespan.before_start() # pre-start hook
sub_process_manager.start_all()
await self.lifespan.after_start() # post-start hook
await self.keep_alive()
@ -108,7 +112,11 @@ class LiteyukiBot:
cmd = "nohup"
self.process_manager.terminate_all()
# restart after the process exits
threading.Thread(target=os.system, args=(f"{cmd} {executable} {' '.join(args)}",), daemon=True).start()
threading.Thread(
target=os.system,
args=(f"{cmd} {executable} {' '.join(args)}",),
daemon=True,
).start()
sys.exit(0)
self.call_restart_count += 1
@ -189,7 +197,9 @@ class LiteyukiBot:
"""
return self.lifespan.on_before_process_shutdown(func)
def on_before_process_restart(self, func: PROCESS_LIFESPAN_FUNC) -> PROCESS_LIFESPAN_FUNC:
def on_before_process_restart(
self, func: PROCESS_LIFESPAN_FUNC
) -> PROCESS_LIFESPAN_FUNC:
"""
Register a function to run before a process restart; called when the sub-process restarts
Args:
@ -211,7 +221,7 @@ class LiteyukiBot:
return self.lifespan.on_after_restart(func)
_BOT_INSTANCE: LiteyukiBot
_BOT_INSTANCE: LiteyukiBot | None = None
def get_bot() -> LiteyukiBot:
@ -241,7 +251,9 @@ def get_config(key: str, default: Any = None) -> Any:
return get_bot().config.get(key, default)
def get_config_with_compat(key: str, compat_keys: tuple[str], default: Any = None) -> Any:
def get_config_with_compat(
key: str, compat_keys: tuple[str], default: Any = None
) -> Any:
"""
Get a config value, with compatibility for legacy keys
Args:
@ -256,14 +268,18 @@ def get_config_with_compat(key: str, compat_keys: tuple[str], default: Any = Non
return get_bot().config[key]
for compat_key in compat_keys:
if compat_key in get_bot().config:
logger.warning(f"Config key \"{compat_key}\" will be deprecated, use \"{key}\" instead.")
logger.warning(
f'Config key "{compat_key}" will be deprecated, use "{key}" instead.'
)
return get_bot().config[compat_key]
return default
def print_logo():
"""@litedoc-hide"""
print("\033[34m" + r"""
print(
"\033[34m"
+ r"""
__ ______ ________ ________ __ __ __ __ __ __ ______
/ | / |/ |/ |/ \ / |/ | / |/ | / |/ |
$$ | $$$$$$/ $$$$$$$$/ $$$$$$$$/ $$ \ /$$/ $$ | $$ |$$ | /$$/ $$$$$$/
@ -273,4 +289,6 @@ def print_logo():
$$ |_____ _$$ |_ $$ | $$ |_____ $$ | $$ \__$$ |$$ |$$ \ _$$ |_
$$ |/ $$ | $$ | $$ | $$ | $$ $$/ $$ | $$ |/ $$ |
$$$$$$$$/ $$$$$$/ $$/ $$$$$$$$/ $$/ $$$$$$/ $$/ $$/ $$$$$$/
""" + "\033[0m")
"""
+ "\033[0m"
)
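For reference, get_config_with_compat returns config[key] when the new key is present, otherwise walks compat_keys, logs a deprecation warning for the first legacy key that matches, and finally falls back to default. A minimal usage sketch, assuming the bot is already initialized; the key names below are hypothetical and not part of this commit:

from liteyuki.bot import get_config_with_compat  # assuming this file is liteyuki/bot/__init__.py

# Prefer the new "log_level" key; accept the legacy "logger.level" key with a
# deprecation warning; otherwise fall back to "INFO".
level = get_config_with_compat("log_level", ("logger.level",), "INFO")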

View File

@ -4,20 +4,31 @@
"""
import asyncio
from multiprocessing import Pipe
from typing import Any, Callable, Coroutine, Generic, Optional, TypeAlias, TypeVar, get_args
from typing import (
Any,
Callable,
Coroutine,
Generic,
Optional,
TypeAlias,
TypeVar,
get_args,
)
from liteyuki.log import logger
from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable
T = TypeVar("T")
SYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Any] # synchronous receive function
ASYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, Any]] # asynchronous receive function
ON_RECEIVE_FUNC: TypeAlias = SYNC_ON_RECEIVE_FUNC | ASYNC_ON_RECEIVE_FUNC # receive function
SYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[[T], Any]  # synchronous receive function
ASYNC_ON_RECEIVE_FUNC: TypeAlias = Callable[
[T], Coroutine[Any, Any, Any]
]  # asynchronous receive function
ON_RECEIVE_FUNC: TypeAlias = SYNC_ON_RECEIVE_FUNC | ASYNC_ON_RECEIVE_FUNC  # receive function
SYNC_FILTER_FUNC: TypeAlias = Callable[[T], bool] # synchronous filter function
ASYNC_FILTER_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, bool]] # asynchronous filter function
FILTER_FUNC: TypeAlias = SYNC_FILTER_FUNC | ASYNC_FILTER_FUNC # filter function
SYNC_FILTER_FUNC: TypeAlias = Callable[[T], bool]  # synchronous filter function
ASYNC_FILTER_FUNC: TypeAlias = Callable[[T], Coroutine[Any, Any, bool]]  # asynchronous filter function
FILTER_FUNC: TypeAlias = SYNC_FILTER_FUNC | ASYNC_FILTER_FUNC  # filter function
_func_id: int = 0
_channel: dict[str, "Channel"] = {}
@ -39,7 +50,9 @@ class Channel(Generic[T]):
"""
self.conn_send, self.conn_recv = Pipe()
self._conn_send_inner, self._conn_recv_inner = Pipe() # internal channel for sub-process communication
self._conn_send_inner, self._conn_recv_inner = (
Pipe()
) # internal channel for sub-process communication
self._closed = False
self._on_main_receive_func_ids: list[int] = []
self._on_sub_receive_func_ids: list[int] = []
@ -64,7 +77,9 @@ class Channel(Generic[T]):
_channel[name] = self
logger.debug(f"Channel {name} initialized in main process")
else:
logger.debug(f"Channel {name} initialized in sub process, should manually set in main process")
logger.debug(
f"Channel {name} initialized in sub process, should manually set in main process"
)
def _get_generic_type(self) -> Optional[type]:
"""
@ -72,7 +87,7 @@ class Channel(Generic[T]):
Returns:
Optional[type]: the generic type
"""
if hasattr(self, '__orig_class__'):
if hasattr(self, "__orig_class__"):
return get_args(self.__orig_class__)[0]
return None
@ -98,7 +113,10 @@ class Channel(Generic[T]):
elif isinstance(structure, dict):
if not isinstance(data, dict):
return False
return all(k in data and self._validate_structure(data[k], structure[k]) for k in structure)
return all(
k in data and self._validate_structure(data[k], structure[k])
for k in structure
)
return False
def __str__(self):
@ -113,10 +131,12 @@ class Channel(Generic[T]):
if self.type_check:
_type = self._get_generic_type()
if _type is not None and not self._validate_structure(data, _type):
raise TypeError(f"Data must be an instance of {_type}, {type(data)} found")
raise TypeError(
f"Data must be an instance of {_type}, {type(data)} found"
)
if self._closed:
raise RuntimeError("Cannot send to a closed channel_")
raise RuntimeError("Cannot send to a closed channel")
self.conn_send.send(data)
def receive(self) -> T:
@ -126,7 +146,7 @@ class Channel(Generic[T]):
T: the received data
"""
if self._closed:
raise RuntimeError("Cannot receive from a closed channel_")
raise RuntimeError("Cannot receive from a closed channel")
while True:
data = self.conn_recv.recv()
@ -142,7 +162,9 @@ class Channel(Generic[T]):
data = await loop.run_in_executor(None, self.receive)
return data
def on_receive(self, filter_func: Optional[FILTER_FUNC] = None) -> Callable[[Callable[[T], Any]], Callable[[T], Any]]:
def on_receive(
self, filter_func: Optional[FILTER_FUNC] = None
) -> Callable[[Callable[[T], Any]], Callable[[T], Any]]:
"""
Receive data and execute the registered function
Args:
@ -187,37 +209,52 @@ class Channel(Generic[T]):
data: the data
"""
if IS_MAIN_PROCESS:
[asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_main_receive_func_ids]
[
asyncio.create_task(_callback_funcs[func_id](data))
for func_id in self._on_main_receive_func_ids
]
else:
[asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_sub_receive_func_ids]
[
asyncio.create_task(_callback_funcs[func_id](data))
for func_id in self._on_sub_receive_func_ids
]
"""子进程可用的主动和被动通道"""
active_channel: Channel = Channel(name="active_channel") # 主动通道
active_channel: Channel = Channel(name="active_channel") # 主动通道
passive_channel: Channel = Channel(name="passive_channel") # 被动通道
publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel(name="publish_channel") # 发布通道
publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel(
name="publish_channel"
) # 发布通道
"""通道传递通道,主进程创建单例,子进程初始化时实例化"""
channel_deliver_active_channel: Channel[Channel[Any]] # 主动通道传递通道
channel_deliver_passive_channel: Channel[tuple[str, dict[str, Any]]] # 被动通道传递通道
channel_deliver_active_channel: Channel[Channel[Any]] # 主动通道传递通道
channel_deliver_passive_channel: Channel[tuple[str, dict[str, Any]]] # 被动通道传递通道
if IS_MAIN_PROCESS:
channel_deliver_active_channel = Channel(name="channel_deliver_active_channel") # active channel-delivery channel
channel_deliver_passive_channel = Channel(name="channel_deliver_passive_channel") # passive channel-delivery channel
channel_deliver_active_channel = Channel(
name="channel_deliver_active_channel"
) # active channel-delivery channel
channel_deliver_passive_channel = Channel(
name="channel_deliver_passive_channel"
) # passive channel-delivery channel
@channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "set_channel")
@channel_deliver_passive_channel.on_receive(
filter_func=lambda data: data[0] == "set_channel"
)
def on_set_channel(data: tuple[str, dict[str, Any]]):
name, channel = data[1]["name"], data[1]["channel_"]
set_channel(name, channel)
@channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "get_channel")
@channel_deliver_passive_channel.on_receive(
filter_func=lambda data: data[0] == "get_channel"
)
def on_get_channel(data: tuple[str, dict[str, Any]]):
name, recv_chan = data[1]["name"], data[1]["recv_chan"]
recv_chan.send(get_channel(name))
@channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "get_channels")
@channel_deliver_passive_channel.on_receive(
filter_func=lambda data: data[0] == "get_channels"
)
def on_get_channels(data: tuple[str, dict[str, Any]]):
recv_chan = data[1]["recv_chan"]
recv_chan.send(get_channels())
@ -231,7 +268,9 @@ def set_channel(name: str, channel: "Channel"):
channel ([`Channel`](#class-channel-generic-t)): channel instance
"""
if not isinstance(channel, Channel):
raise TypeError(f"channel_ must be an instance of Channel, {type(channel)} found")
raise TypeError(
f"channel_ must be an instance of Channel, {type(channel)} found"
)
if IS_MAIN_PROCESS:
if name in _channel:
@ -241,10 +280,11 @@ def set_channel(name: str, channel: "Channel"):
# ask the main process to set the channel
channel_deliver_passive_channel.send(
(
"set_channel", {
"name" : name,
"channel_": channel,
}
"set_channel",
{
"name": name,
"channel_": channel,
},
)
)
@ -273,13 +313,7 @@ def get_channel(name: str) -> "Channel":
else:
recv_chan = Channel[Channel[Any]]("recv_chan")
channel_deliver_passive_channel.send(
(
"get_channel",
{
"name" : name,
"recv_chan": recv_chan
}
)
("get_channel", {"name": name, "recv_chan": recv_chan})
)
return recv_chan.receive()
@ -294,12 +328,5 @@ def get_channels() -> dict[str, "Channel"]:
return _channel
else:
recv_chan = Channel[dict[str, Channel[Any]]]("recv_chan")
channel_deliver_passive_channel.send(
(
"get_channels",
{
"recv_chan": recv_chan
}
)
)
channel_deliver_passive_channel.send(("get_channels", {"recv_chan": recv_chan}))
return recv_chan.receive()
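Taken together, the Channel changes above keep the same cross-process messaging surface: send/receive operate on a multiprocessing.Pipe, async_receive offloads the blocking read to an executor, and on_receive registers an optionally filtered callback that the framework's receive loop dispatches. A rough single-process sketch of that surface, with illustrative names and without the main/sub-process wiring that ProcessManager normally provides:

from liteyuki.comm.channel import Channel

# a channel that carries (event_name, payload) tuples
events: Channel[tuple[str, dict]] = Channel(name="demo_channel")

@events.on_receive(filter_func=lambda data: data[0] == "tick")
async def handle_tick(data: tuple[str, dict]):
    # dispatched by the framework's receive loop, not by the receive() call below
    print("tick payload:", data[1])

events.send(("tick", {"count": 1}))  # producer side
print(events.receive())              # consumer side: blocks until data arrives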

View File

@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
"""
This module implements RPC communication (based on IPC)
"""
from typing import TypeAlias, Callable, Any
from liteyuki.comm.channel import Channel
ON_CALLING_FUNC: TypeAlias = Callable[[tuple, dict], Any]
class RPC:
"""
RPC class
"""
def __init__(self, on_calling: ON_CALLING_FUNC) -> None:
self.on_calling = on_calling
def call(self, args: tuple, kwargs: dict) -> Any:
"""
Call
"""
# delegate the call to self.on_calling
return self.on_calling(args, kwargs)

View File

@ -14,6 +14,9 @@ import threading
from multiprocessing import Process
from typing import Any, Callable, TYPE_CHECKING, TypeAlias
from croterline.context import Context
from croterline.process import SubProcess, ProcessFuncType
from liteyuki.log import logger
from liteyuki.utils import IS_MAIN_PROCESS
@ -26,7 +29,10 @@ from liteyuki.comm import Channel
if IS_MAIN_PROCESS:
from liteyuki.comm.channel import get_channel, publish_channel, get_channels
from liteyuki.comm.storage import shared_memory
from liteyuki.comm.channel import channel_deliver_active_channel, channel_deliver_passive_channel
from liteyuki.comm.channel import (
channel_deliver_active_channel,
channel_deliver_passive_channel,
)
else:
from liteyuki.comm import channel
from liteyuki.comm import storage
@ -34,20 +40,18 @@ else:
TARGET_FUNC: TypeAlias = Callable[..., Any]
TIMEOUT = 10
__all__ = [
"ProcessManager"
]
__all__ = ["ProcessManager", "sub_process_manager"]
multiprocessing.set_start_method("spawn", force=True)
class ChannelDeliver:
def __init__(
self,
active: Channel[Any],
passive: Channel[Any],
channel_deliver_active: Channel[Channel[Any]],
channel_deliver_passive: Channel[tuple[str, dict]],
publish: Channel[tuple[str, Any]],
self,
active: Channel[Any],
passive: Channel[Any],
channel_deliver_active: Channel[Channel[Any]],
channel_deliver_passive: Channel[tuple[str, dict]],
publish: Channel[tuple[str, Any]],
):
self.active = active
self.passive = passive
@ -57,7 +61,9 @@ class ChannelDeliver:
# helper that handles cross-process channel delivery
def _delivery_channel_wrapper(func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyValueStore", *args, **kwargs):
def _delivery_channel_wrapper(
func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyValueStore", *args, **kwargs
):
"""
Sub-process entry function
Performs some setup operations
@ -68,8 +74,12 @@ def _delivery_channel_wrapper(func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyVal
channel.active_channel = cd.active # sub-process active channel
channel.passive_channel = cd.passive # sub-process passive channel
channel.channel_deliver_active_channel = cd.channel_deliver_active # sub-process channel-delivery active channel
channel.channel_deliver_passive_channel = cd.channel_deliver_passive # sub-process channel-delivery passive channel
channel.channel_deliver_active_channel = (
cd.channel_deliver_active
) # sub-process channel-delivery active channel
channel.channel_deliver_passive_channel = (
cd.channel_deliver_passive
) # sub-process channel-delivery passive channel
channel.publish_channel = cd.publish # sub-process publish channel
# 给子进程创建共享内存实例
@ -102,8 +112,12 @@ class ProcessManager:
chan_active = get_channel(f"{name}-active")
def _start_process():
process = Process(target=self.targets[name][0], args=self.targets[name][1],
kwargs=self.targets[name][2], daemon=True)
process = Process(
target=self.targets[name][0],
args=self.targets[name][1],
kwargs=self.targets[name][2],
daemon=True,
)
self.processes[name] = process
process.start()
@ -133,7 +147,9 @@ class ProcessManager:
for name in self.targets:
logger.debug(f"Starting process {name}")
threading.Thread(target=self._run_process, args=(name, ), daemon=True).start()
threading.Thread(
target=self._run_process, args=(name,), daemon=True
).start()
def add_target(self, name: str, target: TARGET_FUNC, args: tuple = (), kwargs=None):
"""
@ -154,10 +170,14 @@ class ProcessManager:
passive=chan_passive,
channel_deliver_active=channel_deliver_active_channel,
channel_deliver_passive=channel_deliver_passive_channel,
publish=publish_channel
publish=publish_channel,
)
self.targets[name] = (_delivery_channel_wrapper, (target, channel_deliver, shared_memory, *args), kwargs)
self.targets[name] = (
_delivery_channel_wrapper,
(target, channel_deliver, shared_memory, *args),
kwargs,
)
# main-process channels
def join_all(self):
@ -199,3 +219,54 @@ class ProcessManager:
if name not in self.targets:
logger.warning(f"Process {name} not found.")
return self.processes[name].is_alive()
# new version
class _SubProcessManager:
def __init__(self):
self.processes: dict[str, SubProcess] = {}
def new_process(
self, name: str, *args, **kwargs
) -> Callable[[ProcessFuncType], None]:
def decorator(func: ProcessFuncType):
self.processes[name] = SubProcess(name, func, *args, **kwargs)
return decorator
def add(self, name: str, func: ProcessFuncType, *args, **kwargs):
"""
Add a sub-process
Args:
func: sub-process function
name: sub-process name
args: positional arguments for the sub-process function
kwargs: keyword arguments for the sub-process function
Returns:
"""
self.processes[name] = SubProcess(name, func, *args, **kwargs)
def start(self, name: str):
"""
Start the specified sub-process
Args:
name: sub-process name
Returns:
"""
if name not in self.processes:
raise KeyError(f"Process {name} not found.")
self.processes[name].start()
def start_all(self):
"""
Start all sub-processes
"""
for name, process in self.processes.items():
process.start()
logger.debug(f"Starting process {name}")
sub_process_manager = _SubProcessManager()
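The new _SubProcessManager (exported as the sub_process_manager singleton) is a thin wrapper around croterline's SubProcess: targets are registered with new_process or add and launched by start_all(), which LiteyukiBot now calls between its before_start and after_start hooks. A minimal registration sketch; the worker name and the target's signature are assumptions here, since the exact ProcessFuncType contract comes from croterline:

from liteyuki.core.manager import sub_process_manager

def run_worker(*args, **kwargs):
    # executed in a separate process once start_all() or start("worker") runs
    print("worker started with", args, kwargs)

sub_process_manager.add("worker", run_worker)
# sub_process_manager.start_all()  # normally triggered by the bot's startup logic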

View File

@ -60,7 +60,6 @@ def load_plugin(module_path: str | Path) -> Optional[Plugin]:
f"{metadata.name}({module.__name__.split('.')[-1]})", metadata.type
)
else:
logger.opt(colors=True).warning(
f'The metadata of Liteyuki plugin "{module.__name__}" is not specified, use empty.'
)