forked from bot/app
commit cb3ee4b72f
parent 3a3ef4d6ae
2024-08-29 13:50:12 +08:00
93 changed files with 1632 additions and 9271 deletions

View File

@ -10,7 +10,6 @@ from typing import Any, Optional
from liteyuki.bot.lifespan import (LIFESPAN_FUNC, Lifespan)
from liteyuki.comm.channel import get_channel
from liteyuki.comm.storage import shared_memory
from liteyuki.core.manager import ProcessManager
from liteyuki.log import init_log, logger
from liteyuki.plugin import load_plugin
@ -63,16 +62,25 @@ class LiteyukiBot:
signal.signal(signal.SIGTERM, self._handle_exit)
atexit.register(self.process_manager.terminate_all) # register the exit-time cleanup handler
def run(self):
async def _run(self):
"""
Startup logic
"""
self.lifespan.before_start() # pre-start hook
self.process_manager.start_all()
self.lifespan.after_start() # post-start hook
self.keep_alive()
await self.lifespan.before_start() # pre-start hook
await self.process_manager.start_all()
await self.lifespan.after_start() # post-start hook
await self.keep_alive()
def keep_alive(self):
def run(self):
"""
External startup entry point
"""
try:
asyncio.run(self._run())
except KeyboardInterrupt:
logger.info("Liteyuki is stopping...")
async def keep_alive(self):
"""
Keep Liteyuki running
Returns:
@ -131,9 +139,6 @@ class LiteyukiBot:
name: process name; defaults to None, meaning all processes
Returns:
"""
self.lifespan.before_process_shutdown() # pre-restart hook
self.lifespan.before_process_shutdown() # pre-shutdown hook
if name is not None:
chan_active = get_channel(f"{name}-active")
chan_active.send(1)
@ -230,17 +235,6 @@ class LiteyukiBot:
"""
return self.lifespan.on_after_restart(func)
def on_after_nonebot_init(self, func: LIFESPAN_FUNC):
"""
Register a function to run after NoneBot initialization
Args:
func:
Returns:
"""
return self.lifespan.on_after_nonebot_init(func)
_BOT_INSTANCE: LiteyukiBot
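Example (not part of this diff): with the split into run() and _run(), the whole lifecycle now executes inside a single asyncio.run() call. A minimal startup sketch, assuming the package exposes LiteyukiBot at the top level and forwards on_before_start to the Lifespan object the same way it forwards on_after_restart above; the constructor arguments are not shown in this diff:

from liteyuki import LiteyukiBot  # assumed import path

bot = LiteyukiBot()  # hypothetical: constructor arguments are omitted here

@bot.on_before_start  # assumed wrapper around Lifespan.on_before_start
async def prepare():
    ...  # awaited by run_funcs() inside asyncio.run(bot._run()) before processes start

bot.run()  # blocks until KeyboardInterrupt, then logs "Liteyuki is stopping..."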

View File

@ -39,29 +39,17 @@ class Lifespan:
self._before_process_restart_funcs: list[LIFESPAN_FUNC] = []
self._after_restart_funcs: list[LIFESPAN_FUNC] = []
self._after_nonebot_init_funcs: list[LIFESPAN_FUNC] = []
@staticmethod
def run_funcs(funcs: list[LIFESPAN_FUNC | PROCESS_LIFESPAN_FUNC], *args, **kwargs) -> None:
async def run_funcs(funcs: list[ASYNC_LIFESPAN_FUNC | PROCESS_LIFESPAN_FUNC], *args, **kwargs) -> None:
"""
Run the functions
Run the async functions concurrently
Args:
funcs:
Returns:
"""
try:
loop = asyncio.get_event_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
tasks = []
for func in funcs:
if is_coroutine_callable(func):
tasks.append(func(*args, **kwargs))
else:
tasks.append(async_wrapper(func)(*args, **kwargs))
loop.run_until_complete(asyncio.gather(*tasks))
loop = asyncio.get_running_loop()
tasks = [func(*args, **kwargs) if is_coroutine_callable(func) else async_wrapper(func)(*args, **kwargs) for func in funcs]
await asyncio.gather(*tasks)
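run_funcs now assumes it is called from inside a running event loop and awaits every hook concurrently, wrapping synchronous hooks into coroutines first. A standalone sketch of the same pattern, using inspect.iscoroutinefunction and a local async_wrapper as stand-ins for the helpers in liteyuki.utils:

import asyncio
import inspect

def async_wrapper(func):
    # stand-in for liteyuki.utils.async_wrapper: lift a sync callable into a coroutine
    async def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

async def run_funcs(funcs, *args, **kwargs):
    # async hooks are awaited directly, sync hooks are wrapped, then everything runs concurrently
    tasks = [
        f(*args, **kwargs) if inspect.iscoroutinefunction(f) else async_wrapper(f)(*args, **kwargs)
        for f in funcs
    ]
    await asyncio.gather(*tasks)

async def slow_hook():
    await asyncio.sleep(0.1)

def fast_hook():
    print("sync hook ran")

asyncio.run(run_funcs([slow_hook, fast_hook]))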
def on_before_start(self, func: LIFESPAN_FUNC) -> LIFESPAN_FUNC:
"""
@ -131,63 +119,51 @@ class Lifespan:
self._after_restart_funcs.append(func)
return func
def on_after_nonebot_init(self, func):
"""
Register a function to run after NoneBot initialization
Args:
func:
Returns:
"""
self._after_nonebot_init_funcs.append(func)
return func
def before_start(self) -> None:
async def before_start(self) -> None:
"""
Before start
Returns:
"""
logger.debug("Running before_start functions")
self.run_funcs(self._before_start_funcs)
await self.run_funcs(self._before_start_funcs)
def after_start(self) -> None:
async def after_start(self) -> None:
"""
After start
Returns:
"""
logger.debug("Running after_start functions")
self.run_funcs(self._after_start_funcs)
await self.run_funcs(self._after_start_funcs)
def before_process_shutdown(self) -> None:
async def before_process_shutdown(self) -> None:
"""
Before shutdown
Returns:
"""
logger.debug("Running before_shutdown functions")
self.run_funcs(self._before_process_shutdown_funcs)
await self.run_funcs(self._before_process_shutdown_funcs)
def after_shutdown(self) -> None:
async def after_shutdown(self) -> None:
"""
After shutdown
Returns:
"""
logger.debug("Running after_shutdown functions")
self.run_funcs(self._after_shutdown_funcs)
await self.run_funcs(self._after_shutdown_funcs)
def before_process_restart(self) -> None:
async def before_process_restart(self) -> None:
"""
Before restart
Returns:
"""
logger.debug("Running before_restart functions")
self.run_funcs(self._before_process_restart_funcs)
await self.run_funcs(self._before_process_restart_funcs)
def after_restart(self) -> None:
async def after_restart(self) -> None:
"""
After restart
Returns:
"""
logger.debug("Running after_restart functions")
self.run_funcs(self._after_restart_funcs)
await self.run_funcs(self._after_restart_funcs)

View File

@ -10,12 +10,12 @@ Copyright (C) 2020-2024 LiteyukiStudio. All Rights Reserved
This module defines a generic channel class for inter-process communication
"""
import threading
import asyncio
from multiprocessing import Pipe
from typing import Any, Callable, Coroutine, Generic, Optional, TypeAlias, TypeVar, get_args
from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable, run_coroutine
from liteyuki.log import logger
from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable
T = TypeVar("T")
@ -38,21 +38,22 @@ class Channel(Generic[T]):
There are two receive modes, but only one may be used: active receiving via the `receive` method, or passive receiving via the `on_receive` decorator
"""
def __init__(self, _id: str = "", type_check: Optional[bool] = None):
def __init__(self, name: str, type_check: Optional[bool] = None):
"""
Initialize the channel
Args:
_id: channel ID
name: channel ID
type_check: whether to enable type checking; if None, it defaults to enabled when a generic type is supplied and disabled otherwise
"""
self.conn_send, self.conn_recv = Pipe()
self._closed = False
self._on_main_receive_funcs: list[int] = []
self._on_sub_receive_funcs: list[int] = []
self.name: str = _id
self.is_main_receive_loop_running = False
self.is_sub_receive_loop_running = False
self.conn_send, self.conn_recv = Pipe()
self._conn_send_inner, self._conn_recv_inner = Pipe() # internal pipe, used for sub-process communication
self._closed = False
self._on_main_receive_func_ids: list[int] = []
self._on_sub_receive_func_ids: list[int] = []
self.name: str = name
self.is_receive_loop_running = False
if type_check is None:
# type checking defaults to enabled when a generic type is supplied
@ -62,6 +63,16 @@ class Channel(Generic[T]):
if self._get_generic_type() is None:
raise TypeError("Type hint is required for enforcing type check.")
self.type_check = type_check
if name in _channel:
raise ValueError(f"Channel {name} already exists")
if IS_MAIN_PROCESS:
if name in _channel:
raise ValueError(f"Channel {name} already exists")
_channel[name] = self
logger.debug(f"Channel {name} initialized in main process")
else:
logger.debug(f"Channel {name} initialized in sub process, should manually set in main process")
def _get_generic_type(self) -> Optional[type]:
"""
@ -105,7 +116,7 @@ class Channel(Generic[T]):
def send(self, data: T):
"""
Send data
Send data; this function is synchronous, there is no need for an async version
Args:
data: the data to send
"""
@ -120,7 +131,7 @@ class Channel(Generic[T]):
def receive(self) -> T:
"""
Receive data
Receive data synchronously; blocks the calling thread
Args:
"""
if self._closed:
@ -130,13 +141,15 @@ class Channel(Generic[T]):
data = self.conn_recv.recv()
return data
def close(self):
async def async_receive(self) -> T:
"""
Close the channel
Receive data asynchronously; suspends until data is available
"""
self._closed = True
self.conn_send.close()
self.conn_recv.close()
print("等待接收数据")
loop = asyncio.get_running_loop()
data = await loop.run_in_executor(None, self.receive)
print("接收到数据")
return data
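async_receive keeps the event loop responsive by pushing the blocking Pipe.recv into the default thread-pool executor. The same pattern in isolation, stdlib only:

import asyncio
from multiprocessing import Pipe

conn_send, conn_recv = Pipe()

async def async_receive():
    loop = asyncio.get_running_loop()
    # run the blocking recv() in the default executor so other tasks keep running
    return await loop.run_in_executor(None, conn_recv.recv)

async def main():
    conn_send.send({"hello": "world"})
    print(await async_receive())

asyncio.run(main())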
def on_receive(self, filter_func: Optional[FILTER_FUNC] = None) -> Callable[[Callable[[T], Any]], Callable[[T], Any]]:
"""
@ -146,11 +159,8 @@ class Channel(Generic[T]):
Returns:
A decorator that registers a function to run when data is received
"""
if (not self.is_sub_receive_loop_running) and not IS_MAIN_PROCESS:
threading.Thread(target=self._start_sub_receive_loop, daemon=True).start()
if (not self.is_main_receive_loop_running) and IS_MAIN_PROCESS:
threading.Thread(target=self._start_main_receive_loop, daemon=True).start()
if not IS_MAIN_PROCESS:
raise RuntimeError("on_receive can only be used in main process")
def decorator(func: Callable[[T], Any]) -> Callable[[T], Any]:
global _func_id
@ -171,65 +181,52 @@ class Channel(Generic[T]):
_callback_funcs[_func_id] = wrapper
if IS_MAIN_PROCESS:
self._on_main_receive_funcs.append(_func_id)
self._on_main_receive_func_ids.append(_func_id)
else:
self._on_sub_receive_funcs.append(_func_id)
self._on_sub_receive_func_ids.append(_func_id)
_func_id += 1
return func
return decorator
def _run_on_main_receive_funcs(self, data: Any):
async def _run_on_receive_funcs(self, data: Any):
"""
Run the registered receive callbacks
Args:
data: the received data
"""
for func_id in self._on_main_receive_funcs:
func = _callback_funcs[func_id]
run_coroutine(func(data))
if IS_MAIN_PROCESS:
[asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_main_receive_func_ids]
else:
[asyncio.create_task(_callback_funcs[func_id](data)) for func_id in self._on_sub_receive_func_ids]
def _run_on_sub_receive_funcs(self, data: Any):
"""
Run the registered receive callbacks
Args:
data: the received data
"""
for func_id in self._on_sub_receive_funcs:
func = _callback_funcs[func_id]
run_coroutine(func(data))
def _start_main_receive_loop(self):
async def start_receive_loop(self):
"""
Start receiving data
Distinguishes between the main process and sub-processes automatically; it must be scheduled once in each process
"""
self.is_main_receive_loop_running = True
while not self._closed:
data = self.conn_recv.recv()
self._run_on_main_receive_funcs(data)
if len(self._on_main_receive_func_ids) == 0:
logger.warning(f"No on_receive function registered for {self.name}")
return
def _start_sub_receive_loop(self):
"""
Start receiving data
"""
self.is_sub_receive_loop_running = True
self.is_receive_loop_running = True
logger.debug(f"Starting receive loop for {self.name}")
while not self._closed:
data = self.conn_recv.recv()
self._run_on_sub_receive_funcs(data)
data = await self.async_receive()
await self._run_on_receive_funcs(data)
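With the background threads removed, passive receiving in the main process is now two explicit steps: register callbacks with on_receive, then schedule start_receive_loop as a task (ProcessManager.start_all does the scheduling for every registered channel). A hedged sketch against the API shown in this diff; it assumes it is run from the main process and that no other channel named demo-channel exists:

import asyncio
from liteyuki.comm.channel import Channel  # API as changed in this commit

chan = Channel(name="demo-channel")

@chan.on_receive()
async def handle(data):
    print("got:", data)

async def main():
    recv_task = asyncio.create_task(chan.start_receive_loop())
    chan.send("hello")        # send() stays synchronous
    await asyncio.sleep(0.2)  # let the loop dispatch the callback
    recv_task.cancel()
    chan.send("stop")         # unblock the pending recv so the worker thread can exit cleanly

asyncio.run(main())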
"""子进程可用的主动和被动通道"""
active_channel: Optional["Channel"] = None
passive_channel: Optional["Channel"] = None
publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel(_id="publish_channel")
active_channel: Channel = Channel(name="active_channel")
passive_channel: Channel = Channel(name="passive_channel")
publish_channel: Channel[tuple[str, dict[str, Any]]] = Channel(name="publish_channel")
"""通道传递通道,主进程创建单例,子进程初始化时实例化"""
channel_deliver_active_channel: Channel[Channel[Any]]
channel_deliver_passive_channel: Channel[tuple[str, dict[str, Any]]]
if IS_MAIN_PROCESS:
channel_deliver_active_channel = Channel(_id="channel_deliver_active_channel")
channel_deliver_passive_channel = Channel(_id="channel_deliver_passive_channel")
channel_deliver_active_channel = Channel(name="channel_deliver_active_channel")
channel_deliver_passive_channel = Channel(name="channel_deliver_passive_channel")
@channel_deliver_passive_channel.on_receive(filter_func=lambda data: data[0] == "set_channel")
@ -250,7 +247,7 @@ if IS_MAIN_PROCESS:
recv_chan.send(get_channels())
def set_channel(name: str, channel: Channel):
def set_channel(name: str, channel: "Channel"):
"""
Set a channel instance
Args:
@ -261,20 +258,22 @@ def set_channel(name: str, channel: Channel):
raise TypeError(f"channel_ must be an instance of Channel, {type(channel)} found")
if IS_MAIN_PROCESS:
if name in _channel:
raise ValueError(f"Channel {name} already exists")
_channel[name] = channel
else:
# ask the main process to set the channel
channel_deliver_passive_channel.send(
(
"set_channel", {
"name" : name,
"name" : name,
"channel_": channel,
}
)
)
def set_channels(channels: dict[str, Channel]):
def set_channels(channels: dict[str, "Channel"]):
"""
Set channel instances
Args:
@ -284,7 +283,7 @@ def set_channels(channels: dict[str, Channel]):
set_channel(name, channel)
def get_channel(name: str) -> Channel:
def get_channel(name: str) -> "Channel":
"""
Get a channel instance
Args:
@ -308,7 +307,7 @@ def get_channel(name: str) -> Channel:
return recv_chan.receive()
def get_channels() -> dict[str, Channel]:
def get_channels() -> dict[str, "Channel"]:
"""
Get all channel instances
Returns:

liteyuki/comm/rpc.py Normal file (26 lines)
View File

@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
"""
This module implements RPC (built on top of IPC) communication
"""
from typing import TypeAlias, Callable, Any
from liteyuki.comm.channel import Channel
ON_CALLING_FUNC: TypeAlias = Callable[[tuple, dict], Any]
class RPC:
"""
RPC class
"""
def __init__(self, on_calling: ON_CALLING_FUNC) -> None:
self.on_calling = on_calling
def call(self, args: tuple, kwargs: dict) -> Any:
"""
Perform the call
"""
# delegate to the registered on_calling handler
return self.on_calling(args, kwargs)
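At this point the RPC class is just a thin callable wrapper: call() forwards the positional and keyword arguments to the handler given at construction. A minimal usage sketch of the class exactly as added here:

from liteyuki.comm.rpc import RPC  # module added in this commit

def handler(args: tuple, kwargs: dict):
    # echo back what was called; a real handler would dispatch on these values
    return {"args": args, "kwargs": kwargs}

rpc = RPC(on_calling=handler)
print(rpc.call((1, 2), {"key": "value"}))  # {'args': (1, 2), 'kwargs': {'key': 'value'}}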

View File

@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
"""
Copyright (C) 2020-2024 LiteyukiStudio. All Rights Reserved
@Time : 2024/8/25 15:54
@Author : snowykami
@Email : snowykami@outlook.com
@File : channelv2.py
@Software: PyCharm
"""
class SocksChannel:
"""
A channel class for communication both between processes and within a process; bidirectional, but only one sender and one receiver at a time
There are two receive modes, but only one may be used: active receiving via the `receive` method, or passive receiving via the `on_receive` decorator
"""
def __init__(self, name: str):
"""
Initialize the channel
Args:
name: channel ID
"""
self._name = name
self._conn_send = None
self._conn_recv = None
self._closed = False
def send(self, data):
"""
Send data
Args:
data: the data to send
"""
pass
def receive(self):
"""
Receive data
Returns:
data: the received data
"""
pass
def close(self):
"""
Close the channel
"""
pass

View File

@ -2,13 +2,13 @@
"""
Shared-memory module. Similar to Redis, but more lightweight and thread-safe
"""
import asyncio
import threading
from typing import Any, Coroutine, Optional, TypeAlias, Callable
from typing import Any, Callable, Optional
from liteyuki.comm import channel
from liteyuki.comm.channel import Channel, ON_RECEIVE_FUNC, ASYNC_ON_RECEIVE_FUNC
from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable, run_coroutine, run_coroutine_in_thread
from liteyuki.comm.channel import ASYNC_ON_RECEIVE_FUNC, Channel, ON_RECEIVE_FUNC
from liteyuki.utils import IS_MAIN_PROCESS, is_coroutine_callable, run_coroutine_in_thread
if IS_MAIN_PROCESS:
_locks = {}
@ -31,24 +31,13 @@ def _get_lock(key) -> threading.Lock:
raise RuntimeError("Cannot get lock in sub process.")
class Subscriber:
def __init__(self):
self._subscribers = {}
def receive(self) -> Any:
pass
def unsubscribe(self) -> None:
pass
class KeyValueStore:
def __init__(self):
self._store = {}
self.active_chan = Channel[tuple[str, Optional[dict[str, Any]]]](_id="shared_memory-active")
self.passive_chan = Channel[tuple[str, Optional[dict[str, Any]]]](_id="shared_memory-passive")
self.active_chan = Channel[tuple[str, Optional[dict[str, Any]]]](name="shared_memory-active")
self.passive_chan = Channel[tuple[str, Optional[dict[str, Any]]]](name="shared_memory-passive")
self.publish_channel = Channel[tuple[str, Any]](_id="shared_memory-publish")
self.publish_channel = Channel[tuple[str, Any]](name="shared_memory-publish")
self.is_main_receive_loop_running = False
self.is_sub_receive_loop_running = False
@ -170,7 +159,7 @@ class KeyValueStore:
"publish",
{
"channel": channel_,
"data" : data
"data" : data
}
)
)
@ -184,12 +173,8 @@ class KeyValueStore:
Returns:
装饰器
"""
if IS_MAIN_PROCESS and not self.is_main_receive_loop_running:
threading.Thread(target=self._start_receive_loop, daemon=True).start()
shared_memory.is_main_receive_loop_running = True
elif not IS_MAIN_PROCESS and not self.is_sub_receive_loop_running:
threading.Thread(target=self._start_receive_loop, daemon=True).start()
shared_memory.is_sub_receive_loop_running = True
if not IS_MAIN_PROCESS:
raise RuntimeError("Cannot subscribe in sub process.")
def decorator(func: ON_RECEIVE_FUNC) -> ON_RECEIVE_FUNC:
async def wrapper(data: Any):
@ -211,38 +196,29 @@ class KeyValueStore:
return decorator
@staticmethod
def run_subscriber_receive_funcs(channel_: str, data: Any):
async def run_subscriber_receive_funcs(channel_: str, data: Any):
"""
Run the subscriber receive callbacks
Args:
channel_: channel name
data: the data
"""
if IS_MAIN_PROCESS:
if channel_ in _on_main_subscriber_receive_funcs and _on_main_subscriber_receive_funcs[channel_]:
run_coroutine_in_thread(*[func(data) for func in _on_main_subscriber_receive_funcs[channel_]])
else:
if channel_ in _on_sub_subscriber_receive_funcs and _on_sub_subscriber_receive_funcs[channel_]:
run_coroutine_in_thread(*[func(data) for func in _on_sub_subscriber_receive_funcs[channel_]])
[asyncio.create_task(func(data)) for func in _on_main_subscriber_receive_funcs.get(channel_, [])]
def _start_receive_loop(self):
async def start_receive_loop(self):
"""
Start the publish/subscribe receive loop; it runs in the main process and forwards published data to any sub-processes with subscriptions
"""
if IS_MAIN_PROCESS:
while True:
data = self.active_chan.receive()
if data[0] == "publish":
# run main-process subscriber callbacks
self.run_subscriber_receive_funcs(data[1]["channel"], data[1]["data"])
# forward to sub-processes
self.publish_channel.send(data)
else:
while True:
data = self.publish_channel.receive()
if data[0] == "publish":
# run sub-process subscriber callbacks
self.run_subscriber_receive_funcs(data[1]["channel"], data[1]["data"])
if not IS_MAIN_PROCESS:
raise RuntimeError("Cannot start receive loop in sub process.")
while True:
data = await self.active_chan.async_receive()
if data[0] == "publish":
# run main-process subscriber callbacks
await self.run_subscriber_receive_funcs(data[1]["channel"], data[1]["data"])
# forward to sub-processes
self.publish_channel.send(data)
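The receive loop now runs only in the main process: it pulls ("publish", {...}) messages off active_chan, awaits the local subscriber callbacks, and forwards the same message to sub-processes over publish_channel. A stdlib-only sketch of that dispatch flow; the names below are illustrative, not the liteyuki API:

import asyncio

subscribers: dict[str, list] = {}
forwarded: list = []  # stands in for publish_channel.send(...) to sub-processes

def on_subscriber_receive(channel: str):
    def decorator(func):
        subscribers.setdefault(channel, []).append(func)
        return func
    return decorator

@on_subscriber_receive("demo-topic")
async def on_demo(data):
    print("received:", data)

async def dispatch(message: tuple):
    kind, payload = message
    if kind == "publish":
        # run main-process subscriber callbacks
        await asyncio.gather(*(f(payload["data"]) for f in subscribers.get(payload["channel"], [])))
        # forward the same message to sub-processes
        forwarded.append(message)

asyncio.run(dispatch(("publish", {"channel": "demo-topic", "data": {"n": 1}})))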
class GlobalKeyValueStore:
@ -262,7 +238,6 @@ shared_memory: KeyValueStore = GlobalKeyValueStore.get_instance()
# global singleton access point
if IS_MAIN_PROCESS:
@shared_memory.passive_chan.on_receive(lambda d: d[0] == "get")
def on_get(data: tuple[str, dict[str, Any]]):
key = data[1]["key"]
@ -289,14 +264,6 @@ if IS_MAIN_PROCESS:
recv_chan = data[1]["recv_chan"]
recv_chan.send(shared_memory.get_all())
else:
# sub-processes initialize shared_memory in their entry function
@channel.publish_channel.on_receive()
def on_publish(data: tuple[str, Any]):
channel_, data = data
shared_memory.run_subscriber_receive_funcs(channel_, data)
_ref_count = 0 # import reference count, guards against getting a null reference
if not IS_MAIN_PROCESS:
if (shared_memory is None) and _ref_count > 1:

View File

@ -1,4 +1,2 @@
import multiprocessing
from .manager import *

View File

@ -8,13 +8,11 @@ Copyright (C) 2020-2024 LiteyukiStudio. All Rights Reserved
@File : manager.py
@Software: PyCharm
"""
import asyncio
import multiprocessing
import threading
from multiprocessing import Process
from typing import Any, Callable, TYPE_CHECKING, TypeAlias
from liteyuki.comm.channel import Channel, get_channel, set_channels, publish_channel
from liteyuki.comm.storage import shared_memory
from liteyuki.log import logger
from liteyuki.utils import IS_MAIN_PROCESS
@ -22,10 +20,15 @@ if TYPE_CHECKING:
from liteyuki.bot.lifespan import Lifespan
from liteyuki.comm.storage import KeyValueStore
from liteyuki.comm import Channel
if IS_MAIN_PROCESS:
from liteyuki.comm.channel import get_channel, publish_channel, get_channels
from liteyuki.comm.storage import shared_memory
from liteyuki.comm.channel import channel_deliver_active_channel, channel_deliver_passive_channel
else:
from liteyuki.comm import channel
from liteyuki.comm import storage
TARGET_FUNC: TypeAlias = Callable[..., Any]
TIMEOUT = 10
@ -69,7 +72,7 @@ def _delivery_channel_wrapper(func: TARGET_FUNC, cd: ChannelDeliver, sm: "KeyVal
channel.publish_channel = cd.publish # sub-process publish channel
# create the shared-memory instance for the sub-process
from liteyuki.comm import storage
storage.shared_memory = sm
func(*args, **kwargs)
@ -85,13 +88,12 @@ class ProcessManager:
self.targets: dict[str, tuple[Callable, tuple, dict]] = {}
self.processes: dict[str, Process] = {}
def start(self, name: str):
async def _run_process(self, name: str):
"""
Start the process, monitor it automatically, and add it to the process dict
Start the process, monitor it automatically, and add it to the process dict; this blocks, so schedule it as a task
Args:
name:
Returns:
"""
if name not in self.targets:
raise KeyError(f"Process {name} not found.")
@ -108,30 +110,31 @@ class ProcessManager:
_start_process()
while True:
data = chan_active.receive()
data = await chan_active.async_receive()
if data == 0:
# stop
logger.info(f"Stopping process {name}")
self.lifespan.before_process_shutdown()
await self.lifespan.before_process_shutdown()
self.terminate(name)
break
elif data == 1:
# restart
logger.info(f"Restarting process {name}")
self.lifespan.before_process_shutdown()
self.lifespan.before_process_restart()
await self.lifespan.before_process_shutdown()
await self.lifespan.before_process_restart()
self.terminate(name)
_start_process()
continue
else:
logger.warning("Unknown data received, ignored.")
def start_all(self):
async def start_all(self):
"""
Start all processes
Public start method: starts all processes by creating asyncio tasks
"""
for name in self.targets:
threading.Thread(target=self.start, args=(name,), daemon=True).start()
[asyncio.create_task(chan.start_receive_loop()) for chan in get_channels().values()]
[asyncio.create_task(sm.start_receive_loop()) for sm in [shared_memory]]
[asyncio.create_task(self._run_process(name)) for name in self.targets]
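start_all is now a coroutine that runs on the bot's event loop: it schedules every channel's start_receive_loop, the shared-memory loop, and one _run_process supervisor task per registered target. A hedged sketch of registering a target through the bot; the worker entry point and the constructor arguments are hypothetical:

from liteyuki import LiteyukiBot  # assumed import path

def worker(*args, **kwargs):
    # hypothetical sub-process entry point; ProcessManager wraps it with
    # _delivery_channel_wrapper so it receives the channels and shared memory
    ...

if __name__ == "__main__":
    bot = LiteyukiBot()  # hypothetical: constructor arguments are not shown in this diff
    bot.process_manager.add_target("worker", worker)
    bot.run()  # start_all() then schedules the receive loops and one supervisor task per target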
def add_target(self, name: str, target: TARGET_FUNC, args: tuple = (), kwargs=None):
"""
@ -144,8 +147,8 @@ class ProcessManager:
"""
if kwargs is None:
kwargs = {}
chan_active: Channel = Channel(_id=f"{name}-active")
chan_passive: Channel = Channel(_id=f"{name}-passive")
chan_active: Channel = Channel(name=f"{name}-active")
chan_passive: Channel = Channel(name=f"{name}-passive")
channel_deliver = ChannelDeliver(
active=chan_active,
@ -157,12 +160,6 @@ class ProcessManager:
self.targets[name] = (_delivery_channel_wrapper, (target, channel_deliver, shared_memory, *args), kwargs)
# main-process channels
set_channels(
{
f"{name}-active" : chan_active,
f"{name}-passive": chan_passive
}
)
def join_all(self):
for name, process in self.targets:

View File

@ -10,6 +10,7 @@ Copyright (C) 2020-2024 LiteyukiStudio. All Rights Reserved
"""
from typing import Any, Optional
from liteyuki import Channel
from liteyuki.comm.storage import shared_memory
@ -24,7 +25,7 @@ class MessageEvent:
session_id: str,
user_id: str,
session_type: str,
receive_channel: str,
receive_channel: Optional[Channel["MessageEvent"]] = None,
data: Optional[dict[str, Any]] = None,
):
"""
@ -78,7 +79,10 @@ class MessageEvent:
},
bot_id=self.bot_id,
session_id=self.session_id,
user_id=self.user_id,
session_type=self.session_type,
receive_channel="_"
receive_channel=None
)
shared_memory.publish(self.receive_channel, reply_event)
# shared_memory.publish(self.receive_channel, reply_event)
if self.receive_channel:
self.receive_channel.send(reply_event)
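reply() now pushes the reply event straight back over the event's own receive_channel instead of publishing it through shared memory. A hedged sketch of how an adapter might wire that up; the adapter-side names are illustrative and MessageEvent's full constructor is not shown in this diff:

from liteyuki.comm.channel import Channel  # as changed in this commit

# the adapter owns a reply channel and attaches it to every inbound event
reply_channel = Channel(name="adapter-reply-demo")

@reply_channel.on_receive()
async def deliver_reply(reply_event):
    # push the reply back to the platform; illustrative only
    print("reply ready to send:", reply_event)

# inbound events would be built with receive_channel=reply_channel, so that
# event.reply(...) lands in deliver_reply() via reply_channel.send(reply_event)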

View File

@ -23,6 +23,7 @@ _queue: Queue = Queue()
@shared_memory.on_subscriber_receive("event_to_liteyuki")
async def _(event: MessageEvent):
print("AA")
current_priority = -1
for i, matcher in enumerate(_matcher_list):
logger.info(f"Running matcher {matcher} for event: {event}")
@ -32,17 +33,24 @@ async def _(event: MessageEvent):
current_priority = matcher.priority
if matcher.block:
break
else:
logger.info(f"No matcher matched for event: {event}")
print("BB")
def on_message(rule: Rule = empty_rule, priority: int = 0, block: bool = False) -> Matcher:
matcher = Matcher(rule, priority, block)
# insert in priority order (higher priority first)
def add_matcher(matcher: Matcher):
for i, m in enumerate(_matcher_list):
if m.priority < matcher.priority:
_matcher_list.insert(i, matcher)
break
else:
_matcher_list.append(matcher)
def on_message(rule: Rule = empty_rule, priority: int = 0, block: bool = False) -> Matcher:
matcher = Matcher(rule, priority, block)
# insert in priority order (higher priority first)
add_matcher(matcher)
return matcher
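add_matcher keeps _matcher_list sorted in descending priority, so higher-priority matchers are checked first and a blocking matcher stops propagation to the rest. A standalone illustration of that insertion order:

from dataclasses import dataclass

@dataclass
class M:
    name: str
    priority: int

_matchers: list[M] = []

def add_matcher(matcher: M):
    # insert before the first matcher with a lower priority, else append
    for i, m in enumerate(_matchers):
        if m.priority < matcher.priority:
            _matchers.insert(i, matcher)
            break
    else:
        _matchers.append(matcher)

for name, prio in [("low", 0), ("high", 10), ("mid", 5)]:
    add_matcher(M(name, prio))

print([m.name for m in _matchers])  # ['high', 'mid', 'low'] -- higher priority runs first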
@ -50,4 +58,5 @@ def on_keywords(keywords: list[str], rule=empty_rule, priority: int = 0, block:
@Rule
async def on_keywords_rule(event: MessageEvent):
return any(keyword in event.raw_message for keyword in keywords)
return on_message(on_keywords_rule & rule, priority, block)

View File

@ -43,7 +43,7 @@ def run_coroutine(*coro: Coroutine):
# check whether an event loop already exists
try:
loop = asyncio.get_event_loop()
loop = asyncio.get_running_loop()
if loop.is_running():
# if the event loop is running, create tasks on it
for c in coro:
@ -62,6 +62,7 @@ def run_coroutine(*coro: Coroutine):
# catch other exceptions to avoid the coroutine being awaited more than once
logger.error(f"Exception occurred: {e}")
def run_coroutine_in_thread(*coro: Coroutine):
"""
Run coroutines in a new thread
@ -73,6 +74,7 @@ def run_coroutine_in_thread(*coro: Coroutine):
"""
threading.Thread(target=run_coroutine, args=coro, daemon=True).start()
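run_coroutine_in_thread hands its coroutines to run_coroutine inside a daemon thread, so synchronous callers can fire off async work without owning an event loop. A simplified stdlib stand-in for the same pattern (not the liteyuki implementation itself):

import asyncio
import threading
import time

def run_coroutine_in_thread(*coros):
    # simplified stand-in: run the coroutines on a fresh event loop in a daemon thread
    def runner():
        async def gather_all():
            await asyncio.gather(*coros)
        asyncio.run(gather_all())
    threading.Thread(target=runner, daemon=True).start()

async def background_job():
    await asyncio.sleep(0.1)
    print("background job finished")

run_coroutine_in_thread(background_job())  # returns immediately
time.sleep(0.3)  # keep the demo process alive long enough to see the output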
def path_to_module_name(path: Path) -> str:
"""
Convert a path to a module name